def start_analysis(self, options):
    """Start analysis.
    @param options: options.
    @return: operation status.
    """
    log.info("Starting analysis on guest (id=%s, ip=%s)", self.id, self.ip)

    # TODO: deal with unicode URLs.
    if options["category"] == "file":
        options["file_name"] = sanitize_filename(options["file_name"])

    # If the analysis timeout is higher than the critical timeout,
    # automatically increase the critical timeout by one minute.
    if options["timeout"] > self.timeout:
        self.timeout = options["timeout"] + 60
        log.debug("Automatically increased critical timeout to %s", self.timeout)

    try:
        # Wait for the agent to respond. This is done to check the
        # availability of the agent and verify that it's ready to receive
        # data.
        self.wait(CUCKOO_GUEST_INIT)

        # Invoke the upload of the analyzer to the guest.
        self.upload_analyzer()

        # Give the analysis options to the guest, so it can generate the
        # analysis.conf inside the guest.
        try:
            self.server.add_config(options)
        except Exception:
            raise CuckooGuestError("{0}: unable to upload config to "
                                   "analysis machine".format(self.id))

        # If the target of the analysis is a file, upload it to the guest.
        if options["category"] == "file":
            try:
                with open(options["target"], "rb") as sample:
                    file_data = sample.read()
            except (IOError, OSError) as e:
                raise CuckooGuestError("Unable to read {0}, error: "
                                       "{1}".format(options["target"], e))

            data = xmlrpclib.Binary(file_data)

            try:
                self.server.add_malware(data, options["file_name"])
            except Exception as e:
                raise CuckooGuestError("{0}: unable to upload malware to "
                                       "analysis machine: {1}".format(self.id, e))

        # Launch the analyzer.
        pid = self.server.execute()
        log.debug("%s: analyzer started with PID %d", self.id, pid)
    # If something goes wrong when establishing the connection, raise an
    # exception and abort the analysis.
    except (socket.timeout, socket.error):
        raise CuckooGuestError("{0}: guest communication timeout, check "
                               "networking or try to increase "
                               "timeout".format(self.id))
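# Illustrative sketch (not part of the original module): the critical-timeout
# rule used above, isolated as a pure function so its behaviour is easy to
# test. The helper name and the 60-second pad are assumptions for this demo.
def bump_critical_timeout(analysis_timeout, critical_timeout, pad=60):
    """Return the critical timeout, raised to analysis_timeout + pad
    whenever the requested analysis timeout exceeds it."""
    if analysis_timeout > critical_timeout:
        return analysis_timeout + pad
    return critical_timeout

assert bump_critical_timeout(300, 200) == 360  # raised: 300s analysis + 60s pad
assert bump_critical_timeout(100, 200) == 200  # unchanged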
def process_new_task_files(request, samples, details, opt_filename, unique):
    list_of_files = []
    for sample in samples:
        # Error if there was only one submitted sample and it's empty.
        # But if there are multiple and one was empty, just ignore it.
        if not sample.size:
            details["errors"].append({sample.name: "You uploaded an empty file."})
            continue
        elif sample.size > web_cfg.general.max_sample_size:
            details["errors"].append({
                sample.name: "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."
            })
            continue

        if opt_filename:
            filename = opt_filename
        else:
            filename = sanitize_filename(sample.name)

        # Move the sample from the Django temporary file to CAPE temporary
        # storage so it persists between reboots (if configured that way).
        try:
            path = store_temp_file(sample.read(), filename)
        except OSError:
            details["errors"].append({
                filename: "The disk holding the configured temp folder is out of space. Free some space and try again."
            })
            continue

        sha256 = File(path).get_sha256()
        if (not request.user.is_staff
                and (web_cfg.uniq_submission.enabled or unique)
                and db.check_file_uniq(sha256, hours=web_cfg.uniq_submission.hours)):
            details["errors"].append({
                filename: "Duplicated file; disable the unique option on submit or in conf/web.conf to force submission."
            })
            continue

        content = get_file_content(path)
        list_of_files.append((content, path, sha256))

    return list_of_files, details
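# Self-contained sketch of how the (content, path, sha256) tuples returned by
# process_new_task_files() above are typically consumed. fake_download_file
# and the sample data are stand-ins for this demo only; the real views below
# use download_file(**details) with a full details dict.
def fake_download_file(**details):
    return "ok", [1]

list_of_files = [(b"MZ...", "/tmp/upload/sample.exe", "a" * 64)]  # fake entry
details = {"errors": [], "task_ids": []}
for content, path, sha256 in list_of_files:
    details.update(path=path, content=content)
    status, task_ids_tmp = fake_download_file(**details)
    if status == "error":
        details["errors"].append({path: task_ids_tmp})
    else:
        details["task_ids"] = task_ids_tmp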
def _sf_chlildren(child):
    path_to_extract = False
    _, ext = os.path.splitext(child.filename)
    ext = ext.lower()
    if ext in demux_extensions_list or is_valid_type(child.magic):
        target_path = os.path.join(tmp_path, b"cuckoo-sflock")
        if not os.path.exists(target_path):
            os.mkdir(target_path)
        tmp_dir = tempfile.mkdtemp(dir=target_path)
        try:
            if child.contents:
                path_to_extract = os.path.join(
                    tmp_dir, sanitize_filename(child.filename.decode()).encode())
                with open(path_to_extract, "wb") as f:
                    f.write(child.contents)
        except Exception as e:
            log.error(e, exc_info=True)
    return path_to_extract
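# Hedged usage sketch for _sf_chlildren(): it is normally mapped over the
# children produced by sflock's unpack(), as in CAPE's demux code. The
# unpack() signature and the .children attribute are assumptions about the
# installed sflock version; verify them before relying on this.
from sflock import unpack

container = unpack(filepath=b"/tmp/sample.zip")  # placeholder archive path
extracted = [p for p in map(_sf_chlildren, container.children or []) if p]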
def download_from_vt(vtdl, details, opt_filename, settings):
    for h in get_hash_list(vtdl):
        folder = os.path.join(settings.VTDL_PATH, "cape-vt")
        if not os.path.exists(folder):
            os.makedirs(folder)
        base_dir = tempfile.mkdtemp(prefix="vtdl", dir=folder)
        if opt_filename:
            filename = base_dir + "/" + opt_filename
        else:
            filename = base_dir + "/" + sanitize_filename(h)
        paths = db.sample_path_by_hash(h)

        # Clean content left over from a previous iteration.
        if "content" in details:
            del details["content"]

        if paths:
            details["content"] = get_file_content(paths)

        if settings.VTDL_KEY:
            details["headers"] = {"x-apikey": settings.VTDL_KEY}
        elif details.get("apikey", False):
            details["headers"] = {"x-apikey": details["apikey"]}
        else:
            details["errors"].append({
                "error": "API key not configured and not passed as opt_apikey."
            })
            return details

        details["url"] = "https://www.virustotal.com/api/v3/files/{id}/download".format(id=h.lower())
        details["fhash"] = h
        details["path"] = filename
        details["service"] = "VirusTotal"
        if not details.get("content", False):
            status, task_ids_tmp = download_file(**details)
        else:
            details["service"] = "Local"
            status, task_ids_tmp = download_file(**details)
        if status == "error":
            details["errors"].append({h: task_ids_tmp})
        else:
            details["task_ids"] = task_ids_tmp
    return details
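# Self-contained snippet showing the only external contract download_from_vt()
# relies on above: VirusTotal's v3 file-download endpoint and header shape.
# The hash and key here are fake, for illustration only.
h = "A" * 64                                # fake SHA-256
headers = {"x-apikey": "0123456789abcdef"}  # fake key
url = "https://www.virustotal.com/api/v3/files/{id}/download".format(id=h.lower())
assert url.endswith(h.lower() + "/download")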
def index(request, resubmit_hash=False):
    if request.method == "POST":
        (
            static, package, timeout, priority, options, machine, platform,
            tags, custom, memory, clock, enforce_timeout, shrike_url,
            shrike_msg, shrike_sid, shrike_refer, unique, referrer, tlp,
            tags_tasks, route, cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not break custom paths.
        options = ",".join(
            "=".join(value.strip() for value in option.split("=", 1))
            for option in options.split(",") if option and "=" in option)
        opt_filename = get_user_filename(options, custom)

        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority
        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout

        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % referrer
        if request.POST.get("free"):
            options += "free=yes,"
        if request.POST.get("nohuman"):
            options += "nohuman=yes,"
        if request.POST.get("tor"):
            options += "tor=yes,"
        if request.POST.get("process_dump"):
            options += "procdump=0,"
        if request.POST.get("process_memory"):
            options += "procmemdump=1,"
        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"
        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"
        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"
        if request.POST.get("norefer"):
            options += "norefer=1,"
        if request.POST.get("oldloader"):
            options += "no-iat=1,"
        if request.POST.get("unpack"):
            options += "unpack=yes,"

        # amsidump is enabled by default in the monitor for Win10+.
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            if paths:
                content = get_file_content(paths)
                if not content:
                    return render(request, "error.html", {
                        "error": "Can't find {} on disk, {}".format(resubmission_hash, str(paths))
                    })
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(resubmission_hash)
                path = store_temp_file(content, filename)
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(filename): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", resubmission_hash, search_limit=5)
                        for record in records:
                            existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
            else:
                return render(request, "error.html", {"error": "File not found on hdd for resubmission"})
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            details["service"] = "WebGUI"
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    details["errors"].append({sample.name: "You uploaded an empty file."})
                    continue
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    details["errors"].append({
                        sample.name: "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."
                    })
                    continue

                if opt_filename:
                    filename = opt_filename
                else:
                    filename = sanitize_filename(sample.name)

                # Move the sample from the Django temporary file to CAPE
                # temporary storage so it persists between reboots (if
                # configured that way).
                path = store_temp_file(sample.read(), filename)
                sha256 = File(path).get_sha256()

                if (not request.user.is_staff
                        and (web_conf.uniq_submission.enabled or unique)
                        and db.check_file_uniq(sha256, hours=web_conf.uniq_submission.hours)):
                    details["errors"].append({
                        filename: "Duplicated file; disable the unique option on submit or in conf/web.conf to force submission."
                    })
                    continue

                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout

                details["path"] = path
                details["content"] = get_file_content(path)
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records:
                            if record.get("target").get("file", {}).get("sha256"):
                                existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
                    details["task_ids"] = task_ids_tmp
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in conf/web.conf."
                    })

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                tmp_path = store_temp_file(sample.read(), sample.name)
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)

                if not path:
                    return render(request, "error.html", {"error": "You uploaded an unsupported quarantine file."})

                details["path"] = path
                details["content"] = get_file_content(path)
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({sample.name: task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
        elif "static" in request.FILES:
            samples = request.FILES.getlist("static")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."
                    })

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                path = store_temp_file(sample.read(), sample.name)
                task_id = db.add_static(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have a static extractor for this file."})
                details["task_ids"] += task_id
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in conf/web.conf."
                    })

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                path = store_temp_file(sample.read(), sample.name)
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        return render(request, "error.html", {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            if machine.lower() == "all":
                machines = [vm.name for vm in db.list_machines(platform=platform)]
            elif machine:
                machine_details = db.view_machine(machine)
                if platform and hasattr(machine_details, "platform") and not machine_details.platform == platform:
                    return render(request, "error.html", {
                        "error": "Wrong platform, {} VM selected for {} sample".format(machine_details.platform, platform)
                    })
                else:
                    machines = [machine]
            else:
                machines = [None]

            for entry in machines:
                task_id = db.add_url(
                    url=url,
                    package=package,
                    timeout=timeout,
                    priority=priority,
                    options=options,
                    machine=entry,
                    platform=platform,
                    tags=tags,
                    custom=custom,
                    memory=memory,
                    enforce_timeout=enforce_timeout,
                    clock=clock,
                    shrike_url=shrike_url,
                    shrike_msg=shrike_msg,
                    shrike_sid=shrike_sid,
                    shrike_refer=shrike_refer,
                    route=route,
                    cape=cape,
                    tags_tasks=tags_tasks,
                    user_id=request.user.id or 0,
                )
                details["task_ids"].append(task_id)
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            url = request.POST.get("dlnexec").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            response = _download_file(request.POST.get("route"), url, options)
            if not response:
                return render(request, "error.html", {"error": "It was impossible to retrieve the url"})

            name = os.path.basename(url)
            if "." not in name:
                name = get_user_filename(options, custom) or generate_fake_name()

            path = store_temp_file(response, name)
            details["path"] = path
            details["content"] = get_file_content(path)
            details["service"] = "DLnExec"
            details["source_url"] = url
            status, task_ids_tmp = download_file(**details)
            if status == "error":
                details["errors"].append({name: task_ids_tmp})
            else:
                details["task_ids"] = task_ids_tmp
        elif (settings.VTDL_ENABLED and "vtdl" in request.POST
              and request.POST.get("vtdl", False) and request.POST.get("vtdl")[0] != ""):
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(request, "error.html", {
                    "error": "You specified VirusTotal but must edit the file and specify your "
                             "VTDL_KEY variable and VTDL_PATH base directory"
                })
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(request.POST.get("vtdl").strip(), details, opt_filename, settings)

        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(request, "error.html", {
                "error": "Error adding task(s) to CAPE's database.",
                "errors": details["errors"]
            })
    else:
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf["dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled

        if all_vms_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # Load multi-machinery tags.
            # Get enabled machinery.
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get("machinery").split(","):
                    vms = [x.strip() for x in getattr(Config(mmachinery), mmachinery).get("machines").split(",")]
                    if any("tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements.
                vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
                # Check each VM config element for tags.
                if any("tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")
        socks5s = _load_socks5_operational()

        socks5s_random = ""
        vpn_random = ""

        if routing.socks5.random_socks5 and socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)
        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)
        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)

        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256", resubmit_hash, search_limit=5)
                for record in records:
                    existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)

        return render(request, "submission/index.html", {
            "packages": sorted(packages),
            "machines": machines,
            "vpns": list(vpns.values()),
            "random_route": random_route,
            "socks5s": list(socks5s.values()),
            "route": routing.routing.route,
            "internet": routing.routing.internet,
            "inetsim": routing.inetsim.enabled,
            "tor": routing.tor.enabled,
            "config": enabledconf,
            "resubmit": resubmit_hash,
            "tags": sorted(list(set(all_vms_tags))),
            "existent_tasks": existent_tasks,
            "all_exitnodes": all_nodes_exits_list,
        })
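# Self-contained demo of the dense options-normalisation expression used in
# the POST handlers above: it trims whitespace around keys and values while
# preserving spaces inside values (e.g. custom paths), and drops entries
# without "=". The input string is fabricated for illustration.
options = " free = yes , path=C:\\Program Files\\app.exe ,bad-flag, "
options = ",".join("=".join(value.strip() for value in option.split("=", 1))
                   for option in options.split(",") if option and "=" in option)
assert options == "free=yes,path=C:\\Program Files\\app.exe"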
def index(request, resubmit_hash=False):
    if request.method == "POST":
        package = request.POST.get("package", "")
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        lin_options = request.POST.get("lin_options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        clock = request.POST.get("clock", datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
        if not clock:
            clock = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")
        if "1970" in clock:
            clock = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)
        static = bool(request.POST.get("static", False))

        all_tags = load_vms_tags()
        if tags and not all(tag.strip() in all_tags for tag in tags.split(",")):
            return render(request, "error.html", {"error": "Check Tags help, you have introduced incorrect tag(s)"})

        if lin_options:
            options = lin_options

        # This is done to remove spaces in options but not break custom paths.
        options = ",".join(
            "=".join(value.strip() for value in option.split("=", 1))
            for option in options.split(",") if option and "=" in option)
        opt_filename = get_user_filename(options, custom)

        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % referrer
        if request.POST.get("free"):
            options += "free=yes,"
        if request.POST.get("nohuman"):
            options += "nohuman=yes,"
        if request.POST.get("tor"):
            options += "tor=yes,"
        if request.POST.get("route", None):
            options += "route={0},".format(request.POST.get("route", None))
        if request.POST.get("process_dump"):
            options += "procdump=0,"
        if request.POST.get("process_memory"):
            options += "procmemdump=1,"
        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"
        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"
        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"
        if request.POST.get("norefer"):
            options += "norefer=1,"
        if request.POST.get("oldloader"):
            options += "loader=oldloader.exe,loader_64=oldloader_x64.exe,"
        if request.POST.get("unpack"):
            options += "unpack=yes,"

        options = options[:-1]

        unique = request.POST.get("unique", False)
        orig_options = options
        task_ids = []
        task_machines = []
        status = "ok"
        failed_hashes = list()
        task_ids_tmp = list()

        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            if paths:
                paths = [_f for _f in [path if os.path.exists(path) else False for path in paths] if _f]
            if not paths and FULL_DB:
                tasks = results_db.analysis.find(
                    {"dropped.sha256": resubmission_hash},
                    {"info.id": 1, "_id": 0})
                if tasks:
                    for task in tasks or []:
                        # Grab the task id and substitute it into the path if
                        # needed (distributed-setup workaround).
                        path = os.path.join(settings.CUCKOO_PATH, "storage", "analyses",
                                            str(task["info"]["id"]), "files", resubmission_hash)
                        if os.path.exists(path):
                            paths = [path]
                            break
            if paths:
                content = get_file_content(paths)
                if not content:
                    return render(request, "error.html", {
                        "error": "Can't find {} on disk, {}".format(resubmission_hash, str(paths))
                    })
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=settings.TEMP_PATH)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(resubmission_hash)
                path = store_temp_file(content, filename)
                headers = {}
                url = "local"
                params = {}
                status, task_ids = download_file(
                    False, content, request, db, task_ids, url, params, headers,
                    "Local", path, package, timeout, options, priority, machine,
                    clock, custom, memory, enforce_timeout, referrer, tags,
                    orig_options, "", static)
            else:
                return render(request, "error.html", {"error": "File not found on hdd for resubmission"})
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a file that exceeds the maximum allowed upload size "
                                 "specified in web/web/local_settings.py."
                    })

                if opt_filename:
                    filename = opt_filename
                else:
                    filename = sanitize_filename(sample.name)

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                path = store_temp_file(sample.read(), filename)

                if unique and db.check_file_uniq(File(path).get_sha256()):
                    return render(request, "error.html", {
                        "error": "Duplicated file, disable unique option to force submission"
                    })

                magic_type = get_magic_type(path)
                if disable_x64 is True:
                    if magic_type and ("x86-64" in magic_type or "PE32+" in magic_type):
                        if len(samples) == 1:
                            return render(request, "error.html", {"error": "Sorry no x64 support yet"})
                        else:
                            continue

                orig_options, timeout, enforce_timeout = recon(path, orig_options, timeout, enforce_timeout)

                platform = get_platform(magic_type)
                if machine.lower() == "all":
                    task_machines = [vm.name for vm in db.list_machines(platform=platform)]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if hasattr(machine_details, "platform") and not machine_details.platform == platform:
                        return render(request, "error.html", {
                            "error": "Wrong platform, {} VM selected for {} sample".format(machine_details.platform, platform)
                        })
                    else:
                        task_machines = [machine]
                else:
                    task_machines = ["first"]

                for entry in task_machines:
                    if entry == "first":
                        entry = None
                    try:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path, package=package, timeout=timeout,
                            options=options, priority=priority, machine=entry,
                            custom=custom, memory=memory, platform=platform,
                            enforce_timeout=enforce_timeout, tags=tags,
                            clock=clock, static=static)
                        task_ids.extend(task_ids_new)
                    except CuckooDemuxError as err:
                        return render(request, "error.html", {"error": err})
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a quarantine file that exceeds the maximum "
                                 "allowed upload size specified in web/web/local_settings.py."
                    })

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                tmp_path = store_temp_file(sample.read(), sample.name)
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception:
                    pass

                if not path:
                    return render(request, "error.html", {"error": "You uploaded an unsupported quarantine file."})

                if machine.lower() == "all":
                    task_machines = [vm.name for vm in db.list_machines(platform="windows")]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if not machine_details.platform == "windows":
                        return render(request, "error.html", {
                            "error": "Wrong platform, linux VM selected for {} sample".format(machine_details.platform)
                        })
                    else:
                        task_machines = [machine]

                if not task_machines:
                    task_machines = ["first"]

                for entry in task_machines:
                    if entry == "first":
                        entry = None
                    task_ids_new = db.demux_sample_and_add_to_db(
                        file_path=path, package=package, timeout=timeout,
                        options=options, priority=priority, machine=entry,
                        custom=custom, memory=memory, tags=tags,
                        enforce_timeout=enforce_timeout, clock=clock)
                    if task_ids_new:
                        task_ids.extend(task_ids_new)
        elif "static" in request.FILES:
            samples = request.FILES.getlist("static")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a file that exceeds the maximum "
                                 "allowed upload size specified in web/web/local_settings.py."
                    })

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                path = store_temp_file(sample.read(), sample.name)
                task_id = db.add_static(file_path=path, priority=priority)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have a static extractor for this file."})
                task_ids.append(task_id)
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {
                        "error": "You uploaded a PCAP file that exceeds the maximum "
                                 "allowed upload size specified in web/web/local_settings.py."
                    })

                # Move the sample from the Django temporary file to Cuckoo
                # temporary storage so it persists between reboots (if
                # configured that way).
                path = store_temp_file(sample.read(), sample.name)
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        return render(request, "error.html", {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority)
                if task_id:
                    task_ids.append(task_id)
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            if machine.lower() == "all":
                task_machines = [vm.name for vm in db.list_machines(platform="windows")]
            elif machine:
                machine_details = db.view_machine(machine)
                if not machine_details.platform == "windows":
                    return render(request, "error.html", {
                        "error": "Wrong platform, linux VM selected for {} sample".format(machine_details.platform)
                    })
                else:
                    task_machines = [machine]
            else:
                task_machines = ["first"]

            for entry in task_machines:
                if entry == "first":
                    entry = None
                task_ids_new = db.add_url(
                    url=url, package=package, timeout=timeout, options=options,
                    priority=priority, machine=entry, custom=custom,
                    memory=memory, enforce_timeout=enforce_timeout, tags=tags,
                    clock=clock)
                if task_ids_new:
                    task_ids.extend(task_ids_new)
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            url = request.POST.get("dlnexec").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            response = _download_file(request.POST.get("route", None), url, options)
            if not response:
                return render(request, "error.html", {"error": "It was impossible to retrieve the url"})

            name = os.path.basename(url)
            if "." not in name:
                name = get_user_filename(options, custom) or generate_fake_name()
            path = store_temp_file(response, name)

            magic_type = get_magic_type(path)
            platform = get_platform(magic_type)
            if machine.lower() == "all":
                task_machines = [vm.name for vm in db.list_machines(platform=platform)]
            elif machine:
                machine_details = db.view_machine(machine[0])
                if not machine_details.platform == platform:
                    return render(request, "error.html", {
                        "error": "Wrong platform, {} VM selected for {} sample".format(machine_details.platform, platform)
                    })
                else:
                    task_machines = [machine]
            else:
                task_machines = ["first"]

            for entry in task_machines:
                if entry == "first":
                    entry = None
                task_ids_new = db.demux_sample_and_add_to_db(
                    file_path=path, package=package, timeout=timeout,
                    options=options, priority=priority, machine=entry,
                    custom=custom, memory=memory,
                    enforce_timeout=enforce_timeout, tags=tags,
                    platform=platform, clock=clock)
                if task_ids_new:
                    task_ids.extend(task_ids_new)
        elif (settings.VTDL_ENABLED and "vtdl" in request.POST
              and request.POST.get("vtdl", False) and request.POST.get("vtdl")[0] != ""):
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(request, "error.html", {
                    "error": "You specified VirusTotal but must edit the file and specify your "
                             "VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                })
            else:
                hashlist = []
                if "," in vtdl:
                    hashlist = [_f for _f in vtdl.replace(" ", "").strip().split(",") if _f]
                else:
                    hashlist.append(vtdl)

                for h in hashlist:
                    base_dir = tempfile.mkdtemp(prefix="cuckoovtdl", dir=settings.VTDL_PATH)
                    task_ids_tmp = list()
                    if opt_filename:
                        filename = base_dir + "/" + opt_filename
                    else:
                        filename = base_dir + "/" + sanitize_filename(h)
                    headers = {}
                    paths = db.sample_path_by_hash(h)
                    content = False
                    if paths:
                        content = get_file_content(paths)

                    if settings.VTDL_PRIV_KEY:
                        headers = {"x-apikey": settings.VTDL_PRIV_KEY}
                    elif settings.VTDL_INTEL_KEY:
                        headers = {"x-apikey": settings.VTDL_INTEL_KEY}
                    url = "https://www.virustotal.com/api/v3/files/{id}/download".format(id=h)
                    params = {}

                    if not content:
                        status, task_ids_tmp = download_file(
                            False, content, request, db, task_ids, url, params,
                            headers, "VirusTotal", filename, package, timeout,
                            options, priority, machine, clock, custom, memory,
                            enforce_timeout, referrer, tags, orig_options, "",
                            static, h)
                    else:
                        status, task_ids_tmp = download_file(
                            False, content, request, db, task_ids, url, params,
                            headers, "Local", filename, package, timeout,
                            options, priority, machine, clock, custom, memory,
                            enforce_timeout, referrer, tags, orig_options, "",
                            static, h)
                    if status == "ok":
                        task_ids = task_ids_tmp
                    else:
                        failed_hashes.append(h)

        if not isinstance(task_ids, list) and status == "error":
            # It is a rendered error message.
            return task_ids
        if not isinstance(task_ids_tmp, list) and status == "error":
            # It is a rendered error message.
            return task_ids_tmp

        if isinstance(task_ids, list):
            tasks_count = len(task_ids)
        else:
            # ToDo improve error msg
            tasks_count = 0
        if tasks_count > 0:
            data = {"tasks": task_ids, "tasks_count": tasks_count}
            if failed_hashes:
                data["failed_hashes"] = failed_hashes
            return render(request, "submission/complete.html", data)
        else:
            return render(request, "error.html", {"error": "Error adding task to Cuckoo's database."})
    else:
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf["dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled

        all_tags = load_vms_tags()
        if all_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # Load multi-machinery tags.
            # Get enabled machinery.
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get("machinery").split(","):
                    vms = [x.strip() for x in getattr(Config(mmachinery), mmachinery).get("machines").split(",")]
                    if any("tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements.
                vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
                # Check each VM config element for tags.
                if any("tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")
        socks5s = _load_socks5_operational()

        socks5s_random = ""
        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("description", False)

        return render(request, "submission/index.html", {
            "packages": sorted(packages),
            "machines": machines,
            "vpns": list(vpns.values()),
            "socks5s": list(socks5s.values()),
            "socks5s_random": socks5s_random,
            "route": routing.routing.route,
            "internet": routing.routing.internet,
            "inetsim": routing.inetsim.enabled,
            "tor": routing.tor.enabled,
            "config": enabledconf,
            "resubmit": resubmit_hash,
            "tags": sorted(list(set(all_tags)))
        })
def index(request, resubmit_hash=False):
    remote_console = False
    if request.method == "POST":
        (
            static, package, timeout, priority, options, machine, platform,
            tags, custom, memory, clock, enforce_timeout, shrike_url,
            shrike_msg, shrike_sid, shrike_refer, unique, referrer, tlp,
            tags_tasks, route, cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not break custom paths.
        options = ",".join(
            "=".join(value.strip() for value in option.split("=", 1))
            for option in options.split(",") if option and "=" in option)
        opt_filename = get_user_filename(options, custom)

        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority
        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout

        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % referrer
        if request.POST.get("free"):
            options += "free=yes,"
        if request.POST.get("nohuman"):
            options += "nohuman=yes,"
        if web_conf.guacamole.enabled and request.POST.get("interactive_desktop"):
            remote_console = True
            if "nohuman=yes," not in options:
                options += "nohuman=yes,"
        if request.POST.get("tor"):
            options += "tor=yes,"
        if request.POST.get("process_dump"):
            options += "procdump=0,"
        if request.POST.get("process_memory"):
            options += "procmemdump=1,"
        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"
        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"
        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"
        if request.POST.get("norefer"):
            options += "norefer=1,"
        if request.POST.get("oldloader"):
            options += "no-iat=1,"
        if request.POST.get("unpack"):
            options += "unpack=yes,"

        job_category = False
        if request.POST.get("job_category"):
            job_category = request.POST.get("job_category")

        # amsidump is enabled by default in the monitor for Win10+.
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        task_category = False
        samples = []
        if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "":
            task_category = "resubmit"
            samples = request.POST.get("hash").strip().split(",")
        elif "sample" in request.FILES:
            task_category = "sample"
            samples = request.FILES.getlist("sample")
        elif "quarantine" in request.FILES:
            task_category = "quarantine"
            samples = request.FILES.getlist("quarantine")
        elif "static" in request.FILES:
            task_category = "static"
            samples = request.FILES.getlist("static")
        elif "pcap" in request.FILES:
            task_category = "pcap"
            samples = request.FILES.getlist("pcap")
        elif "url" in request.POST and request.POST.get("url").strip():
            task_category = "url"
            samples = request.POST.get("url").strip()
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            task_category = "dlnexec"
            samples = request.POST.get("dlnexec").strip()
        elif (settings.VTDL_ENABLED and "vtdl" in request.POST
              and request.POST.get("vtdl", False) and request.POST.get("vtdl")[0] != ""):
            task_category = "vtdl"
            samples = request.POST.get("vtdl").strip()

        list_of_files = []
        if task_category in ("url", "dlnexec"):
            if not samples:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            for url in samples.split(","):
                url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
                if task_category == "dlnexec":
                    path, content, sha256 = process_new_dlnexec_task(url, route, options, custom)
                    if path:
                        list_of_files.append((content, path, sha256))
                elif task_category == "url":
                    list_of_files.append(("", url, ""))
        elif task_category in ("sample", "quarantine", "static", "pcap"):
            list_of_files, details = process_new_task_files(request, samples, details, opt_filename, unique)
        elif task_category == "resubmit":
            for hash in samples:
                paths = []
                if len(hash) in (32, 40, 64):
                    paths = db.sample_path_by_hash(hash)
                else:
                    task_binary = os.path.join(settings.CUCKOO_PATH, "storage", "analyses", str(hash), "binary")
                    if os.path.exists(task_binary):
                        paths.append(task_binary)
                    else:
                        tmp_paths = db.find_sample(task_id=int(hash))
                        if not tmp_paths:
                            details["errors"].append({hash: "Task not found for resubmission"})
                            continue
                        for tmp_sample in tmp_paths:
                            path = False
                            tmp_dict = tmp_sample.to_dict()
                            if os.path.exists(tmp_dict.get("target", "")):
                                path = tmp_dict["target"]
                            else:
                                tmp_tasks = db.find_sample(sample_id=tmp_dict["sample_id"])
                                for tmp_task in tmp_tasks:
                                    tmp_path = os.path.join(settings.CUCKOO_PATH, "storage", "binaries",
                                                            tmp_task.to_dict()["sha256"])
                                    if os.path.exists(tmp_path):
                                        path = tmp_path
                                        break
                            if path:
                                paths.append(path)
                if not paths:
                    details["errors"].append({hash: "File not found on hdd for resubmission"})
                    continue

                content = get_file_content(paths)
                if not content:
                    details["errors"].append({hash: f"Can't find {hash} on disk"})
                    continue
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(hash)
                path = store_temp_file(content, filename)
                list_of_files.append((content, path, hash))

        # Hack for resubmit: first find all files, then file the task under
        # the proper category.
        if job_category and job_category in ("resubmit", "sample", "quarantine", "static", "pcap", "dlnexec", "vtdl"):
            task_category = job_category

        if task_category == "resubmit":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(filename): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", hash, search_limit=5)
                        for record in records or []:
                            existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
        elif task_category == "sample":
            details["service"] = "WebGUI"
            for content, path, sha256 in list_of_files:
                if web_conf.pre_script.enabled and "pre_script" in request.FILES:
                    pre_script = request.FILES["pre_script"]
                    details["pre_script_name"] = request.FILES["pre_script"].name
                    details["pre_script_content"] = pre_script.read()

                if web_conf.during_script.enabled and "during_script" in request.FILES:
                    during_script = request.FILES["during_script"]
                    details["during_script_name"] = request.FILES["during_script"].name
                    details["during_script_content"] = during_script.read()

                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256", sha256, search_limit=5)
                        for record in records:
                            if record.get("target").get("file", {}).get("sha256"):
                                existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)
                    details["task_ids"] = task_ids_tmp
        elif task_category == "quarantine":
            for content, tmp_path, sha256 in list_of_files:
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)

                if not path:
                    details["errors"].append({
                        os.path.basename(tmp_path): "You uploaded an unsupported quarantine file."
                    })
                    continue

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
        elif task_category == "static":
            for content, path, sha256 in list_of_files:
                task_id = db.add_static(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if not task_id:
                    return render(request, "error.html", {"error": "We don't have a static extractor for this file."})
                details["task_ids"] += task_id
        elif task_category == "pcap":
            for content, path, sha256 in list_of_files:
                if path.lower().endswith(b".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        details["errors"].append({os.path.basename(path): "Conversion from SAZ to PCAP failed."})
                        continue

                task_id = db.add_pcap(file_path=path, priority=priority, tlp=tlp, user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)
        elif task_category == "url":
            for _, url, _ in list_of_files:
                if machine.lower() == "all":
                    machines = [vm.name for vm in db.list_machines(platform=platform)]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if platform and hasattr(machine_details, "platform") and not machine_details.platform == platform:
                        details["errors"].append({
                            os.path.basename(url): f"Wrong platform, {machine_details.platform} VM selected for {platform} sample"
                        })
                        continue
                    else:
                        machines = [machine]
                else:
                    machines = [None]

                for entry in machines:
                    task_id = db.add_url(
                        url=url,
                        package=package,
                        timeout=timeout,
                        priority=priority,
                        options=options,
                        machine=entry,
                        platform=platform,
                        tags=tags,
                        custom=custom,
                        memory=memory,
                        enforce_timeout=enforce_timeout,
                        clock=clock,
                        shrike_url=shrike_url,
                        shrike_msg=shrike_msg,
                        shrike_sid=shrike_sid,
                        shrike_refer=shrike_refer,
                        route=route,
                        cape=cape,
                        tags_tasks=tags_tasks,
                        user_id=request.user.id or 0,
                    )
                    details["task_ids"].append(task_id)
        elif task_category == "dlnexec":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                details["service"] = "DLnExec"
                details["source_url"] = samples
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
        elif task_category == "vtdl":
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(request, "error.html", {
                    "error": "You specified VirusTotal but must edit the file and specify your "
                             "VTDL_KEY variable and VTDL_PATH base directory"
                })
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(samples, details, opt_filename, settings)

        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
                "remote_console": remote_console,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(request, "error.html", {
                "error": "Error adding task(s) to CAPE's database.",
                "errors": details["errors"]
            })
    else:
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf["dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled
        enabledconf["pre_script"] = web_conf.pre_script.enabled
        enabledconf["during_script"] = web_conf.during_script.enabled

        if all_vms_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # Load multi-machinery tags.
            # Get enabled machinery.
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get("machinery").split(","):
                    vms = [x.strip() for x in getattr(Config(mmachinery), mmachinery).get("machines").split(",") if x.strip()]
                    if any("tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements.
                vms = [x.strip() for x in str(getattr(Config(machinery), machinery).get("machines")).split(",") if x.strip()]
                # Check each VM config element for tags.
                if any("tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")
        socks5s = _load_socks5_operational()

        socks5s_random = ""
        vpn_random = ""

        if routing.socks5.random_socks5 and socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)
        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)
        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)

        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256", resubmit_hash, search_limit=5)
                for record in records:
                    existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record)

        return render(request, "submission/index.html", {
            "packages": sorted(packages),
            "machines": machines,
            "vpns": list(vpns.values()),
            "random_route": random_route,
            "socks5s": list(socks5s.values()),
            "route": routing.routing.route,
            "internet": routing.routing.internet,
            "inetsim": routing.inetsim.enabled,
            "tor": routing.tor.enabled,
            "config": enabledconf,
            "resubmit": resubmit_hash,
            "tags": sorted(list(set(all_vms_tags))),
            "existent_tasks": existent_tasks,
            "all_exitnodes": all_nodes_exits_list,
        })
def run(self, results):
    self.noinject = self.options.get("noinject", False)
    self.resublimit = int(self.options.get("resublimit", 5))
    self.distributed = self.options.get("distributed", False)
    self.resuburl = self.options.get(
        "url", "http://127.0.0.1:8000/apiv2/tasks/create/file/")
    self.job_cache_timeout_minutes = self.options.get(
        "job_cache_timeout_minutes", 180)
    filesdict = {}
    self.task_options_stack = []
    self.task_options = None
    self.task_custom = None
    self.machine = None
    self.resubcnt = 0
    self.sigfile_list = []
    report = dict(results)
    self.results = results

    # Never resubmit from a task that is itself a resubmission.
    if report["info"].get("options", {}).get("resubmitjob") or (
            "Parent_Task_ID" in results.get("info", {}).get("custom", "")):
        log.warning("Bailing out of resubexe: this is a child task")
        return

    # Skip resubmission entirely if the top listed object was signed by a
    # whitelisted certificate.
    for entry in results.get("signatures", []):
        if entry.get("name", "") == "zwhitelistedcerts" and entry.get("data", []):
            log.info("Skipping resub: the top listed object was signed by "
                     "a whitelisted cert")
            return

    # Collect the base names of executables written by Office processes so
    # matching dropped files are considered for resubmission.
    try:
        for entry in results.get("signatures", []):
            if entry.get("name", "") != "office_write_exe":
                continue
            for exe_write in entry.get("data", []):
                mfile = exe_write.get("office_write_exe_magic", "")
                if mfile:
                    mfile2 = re.sub(r"_[A-Za-z0-9]+\.[Ee][Xx][Ee]$", "", mfile)
                    if mfile2 not in self.sigfile_list:
                        self.sigfile_list.append(mfile2)
    except Exception as exc:
        log.info("Problem hunting for office exe magic files: %s", exc)

    if report["info"].get("options"):
        for key, val in list(report["info"]["options"].items()):
            self.task_options_stack.append(f"{key}={val}")

    if report["info"].get("machine"):
        self.machine = report["info"]["machine"]["label"]

    # Copy the TLP setting from the current task.
    if report["info"].get("tlp", False):
        self.tlp = report["info"]["tlp"]

    self.task_options_stack.append("resubmitjob=true")
    if self.noinject:
        self.task_options_stack.append("free=true")
    if self.task_options_stack:
        self.task_options = ",".join(self.task_options_stack)

    for dropped in report.get("dropped", []):
        # Never resubmit the original target itself.
        if (results["target"]["category"] == "file"
                and self.results["target"]["file"]["sha256"] == dropped["sha256"]):
            continue
        skip_it = False
        for gpath in dropped["guest_paths"]:
            if "." in gpath and not skip_it:
                lfile = ntpath.basename(gpath).lower()
                if lfile in whitelisted_names:
                    skip_it = True
        if skip_it:
            continue
        if (os.path.isfile(dropped["path"]) and dropped["size"] > 0xA2
                and all(x not in whitelisted_names for x in dropped["name"])
                and "Security: 1" not in dropped["type"]):
            if ((("PE32" in dropped["type"] or "MS-DOS" in dropped["type"])
                    and "DLL" not in dropped["type"]
                    and "native" not in dropped["type"])
                    or (any(x in dropped["type"] for x in interesting_file_types)
                        and dropped["name"])):
                if dropped["sha256"] not in filesdict:
                    srcpath = os.path.join(
                        CUCKOO_ROOT, "storage", "analyses",
                        str(report["info"]["id"]), "files", dropped["sha256"])
                    linkdir = os.path.join(
                        CUCKOO_ROOT, "storage", "analyses",
                        str(report["info"]["id"]), "files",
                        f"{dropped['sha256']}_link")
                    guest_name = ntpath.basename(dropped["name"][0])
                    linkpath = os.path.join(linkdir, guest_name)
                    if not os.path.exists(linkdir):
                        os.makedirs(linkdir, mode=0o755)
                    try:
                        # Symlink so the resubmitted task sees the original
                        # guest file name rather than the sha256.
                        if not os.path.exists(linkpath):
                            os.symlink(srcpath, linkpath)
                        filesdict[dropped["sha256"]] = linkpath
                    except Exception:
                        filesdict[dropped["sha256"]] = dropped["path"]
            else:
                for gpath in dropped["guest_paths"]:
                    if "." not in gpath:
                        continue
                    lfile = ntpath.basename(gpath).lower()
                    base, ext = ntpath.splitext(lfile)
                    if ext in interesting_file_extensions or gpath in self.sigfile_list:
                        if dropped["sha256"] not in filesdict:
                            srcpath = os.path.join(
                                CUCKOO_ROOT, "storage", "analyses",
                                str(report["info"]["id"]), "files",
                                dropped["sha256"])
                            linkdir = os.path.join(
                                CUCKOO_ROOT, "storage", "analyses",
                                str(report["info"]["id"]), "files",
                                f"{dropped['sha256']}_link")
                            linkpath = os.path.join(linkdir,
                                                    ntpath.basename(gpath))
                            if not os.path.exists(linkdir):
                                os.makedirs(linkdir, mode=0o755)
                            try:
                                if not os.path.exists(linkpath):
                                    os.symlink(srcpath, linkpath)
                                filesdict[dropped["sha256"]] = linkpath
                            except Exception:
                                filesdict[dropped["sha256"]] = dropped["path"]

    if report.get("suricata", {}).get("files"):
        for suricata_file_e in results["suricata"]["files"]:
            if not suricata_file_e.get("file_info", {}):
                continue
            # Don't resubmit truncated files.
            if suricata_file_e.get("file_info", {}).get("size", -1) != \
                    suricata_file_e.get("size", -2):
                continue
            # Never resubmit the original target itself.
            if (results["target"]["category"] == "file"
                    and results["target"]["file"]["sha256"]
                    == suricata_file_e["file_info"]["sha256"]):
                continue
            if os.path.isfile(suricata_file_e["file_info"]["path"]):
                ftype = suricata_file_e["file_info"]["type"]
                if (("PE32" in ftype or "MS-DOS" in ftype)
                        and "DLL" not in ftype and "native" not in ftype):
                    if suricata_file_e["file_info"]["sha256"] not in filesdict:
                        filesdict[suricata_file_e["file_info"]["sha256"]] = \
                            suricata_file_e["file_info"]["path"]

    db = Database()
    for e in filesdict:
        if not File(filesdict[e]).get_size():
            continue
        if self.resubcnt >= self.resublimit:
            log.info("Hit resub limit of %d, stopping iteration",
                     self.resublimit)
            break
        find_sample = db.find_sample(sha256=e)
        if find_sample:
            stasks = db.list_tasks(sample_id=find_sample.id)
            subbed_hash = False
            added_previous = False
            for entry in stasks:
                if subbed_hash:
                    continue
                tid = entry.id
                tstart = entry.started_on
                cat = entry.category
                target = entry.target
                if cat != "file":
                    continue
                # If the same file name ran recently enough, reference the
                # cached task instead of submitting a new one.
                if ((tstart + datetime.timedelta(
                        minutes=self.job_cache_timeout_minutes)
                        > datetime.datetime.utcnow()) and target
                        and os.path.basename(target)
                        == sanitize_filename(os.path.basename(filesdict[e]))
                        and tid not in self.results.get("resubs", [])):
                    log.info(
                        "Adding previous task run to our resub list %s for "
                        "hash %s and filename %s", tid, e, filesdict[e])
                    self.results.setdefault("resubs", []).append(tid)
                    added_previous = True
                    continue
                if not added_previous and not subbed_hash:
                    self.task_custom = f"Parent_Task_ID:{report['info']['id']}"
                    if report["info"].get("custom"):
                        self.task_custom = f"{self.task_custom} Parent_Custom:{report['info']['custom']}"
                    task_ids_new = None
                    if self.distributed and self.resuburl:
                        options = {
                            "priority": 1,
                            "options": self.task_options,
                            "custom": self.task_custom,
                            "parent_id": int(report["info"]["id"]),
                            "timeout": 90,
                        }
                        multipart_file = [
                            ("file", (os.path.basename(filesdict[e]),
                                      open(filesdict[e], "rb")))
                        ]
                        try:
                            log.info("Going to try to resub %s via the api",
                                     filesdict[e])
                            res = requests.post(self.resuburl,
                                                files=multipart_file,
                                                data=options)
                            if res and res.ok:
                                task_ids_new = res.json()["data"]["task_ids"]
                        # Don't reuse "e" here: it is the sha256 loop variable.
                        except Exception as exc:
                            log.error(exc)
                    else:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=filesdict[e],
                            package="",
                            timeout=0,
                            priority=1,
                            options=self.task_options,
                            machine="",
                            platform=None,
                            tags=None,
                            custom=self.task_custom,
                            memory=False,
                            enforce_timeout=False,
                            clock=None,
                            shrike_url=None,
                            shrike_msg=None,
                            shrike_sid=None,
                            shrike_refer=None,
                        )
                    if task_ids_new:
                        for task_id in task_ids_new:
                            log.info(
                                'Resubmitexe file "%s" added as task with ID %s (resub count %s)',
                                filesdict[e], task_id, self.resubcnt)
                            self.results.setdefault("resubs", []).append(task_id)
                            self.resubcnt += 1
                        subbed_hash = True
        else:
            self.task_custom = f"Parent_Task_ID:{report['info']['id']}"
            if report["info"].get("custom"):
                self.task_custom = f"{self.task_custom} Parent_Custom:{report['info']['custom']}"
            task_ids_new = None
            if self.distributed and self.resuburl:
                options = {
                    "priority": 1,
                    "options": self.task_options,
                    "custom": self.task_custom,
                    "parent_id": int(report["info"]["id"]),
                    "timeout": 90,
                }
                multipart_file = [("file", (os.path.basename(filesdict[e]),
                                            open(filesdict[e], "rb")))]
                try:
                    log.info("Going to try to resub %s via the api",
                             filesdict[e])
                    res = requests.post(self.resuburl, files=multipart_file,
                                        data=options)
                    if res and res.ok:
                        task_ids_new = res.json()["data"]["task_ids"]
                except Exception as exc:
                    log.error(exc)
            else:
                task_ids_new = db.demux_sample_and_add_to_db(
                    file_path=filesdict[e],
                    package="",
                    timeout=0,
                    priority=1,
                    options=self.task_options,
                    machine="",
                    platform=None,
                    tags=None,
                    custom=self.task_custom,
                    memory=False,
                    enforce_timeout=False,
                    clock=None,
                    shrike_url=None,
                    shrike_msg=None,
                    shrike_sid=None,
                    shrike_refer=None,
                )
            if task_ids_new:
                for task_id in task_ids_new:
                    log.info(
                        'Resubmitexe file "%s" added as task with ID %s (resub count %s)',
                        filesdict[e], task_id, self.resubcnt)
                    self.results.setdefault("resubs", []).append(task_id)
                    self.resubcnt += 1
            else:
                log.warning("Error adding resubmitexe task to database")
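# --- Hedged usage sketch (not part of the original module) ---
# The smallest results dict that exercises run()'s child-task early exit
# above. The class name ResubmitExe is an assumption for illustration; the
# dict keys mirror the lookups at the top of run().
fake_results = {
    "info": {"id": 1, "options": {"resubmitjob": True}, "custom": ""},
    "target": {"category": "file", "file": {"sha256": "0" * 64}},
    "signatures": [],
    "dropped": [],
}
module = ResubmitExe()              # hypothetical module class name
module.options = {"resublimit": 5}  # options normally come from reporting.conf
module.run(fake_results)            # logs "Bailing out ..." and returns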
def start_analysis(self, options):
    """Start analysis.
    @param options: options.
    @return: operation status.
    """
    log.info("Starting analysis on guest (id=%s, ip=%s)", self.id, self.ip)

    if misc_config.ENABLE_CUCKOO_EXTRA_INFO:
        time.sleep(10)
        subprocess.call([misc_config.ADB_PATH, "connect", "192.168.56.10"])
        log.info("Starting to collect information")
        # Custom: collect process information, listening ports and the
        # file list from the device.
        try:
            self.getProcessList()
            self.getListeningPorts()
            self.generateFileList()
        except Exception:
            log.info("ADB error occurred! Trying again...")
            try:
                # Restart the ADB server and reconnect before retrying.
                subprocess.Popen([misc_config.ADB_PATH, "kill-server"])
                subprocess.Popen(["killall", "adb"])
                time.sleep(2)
                subprocess.call(
                    [misc_config.ADB_PATH, "connect", "192.168.56.10"])
                time.sleep(5)
                self.getProcessList()
                self.getListeningPorts()
                self.generateFileList()
            except Exception:
                log.info("ADB error for the second time!")

    # TODO: deal with unicode URLs.
    if options["category"] == "file":
        options["file_name"] = sanitize_filename(options["file_name"])

    # If the analysis timeout is higher than the critical timeout,
    # automatically increase the critical timeout by one minute.
    if options["timeout"] > self.timeout:
        self.timeout = options["timeout"] + 60
        log.debug("Automatically increased critical timeout to %s",
                  self.timeout)

    # Get and set the dynamically generated resultserver port.
    options["port"] = str(ResultServer().port)

    try:
        # Wait for the agent to respond. This is done to check the
        # availability of the agent and verify that it's ready to receive
        # data.
        self.wait(CUCKOO_GUEST_INIT)
        # Invoke the upload of the analyzer to the guest.
        self.upload_analyzer()
        # Give the analysis options to the guest, so it can generate the
        # analysis.conf inside the guest.
        try:
            self.server.add_config(options)
        except Exception:
            raise CuckooGuestError("{0}: unable to upload config to "
                                   "analysis machine".format(self.id))
        # If the target of the analysis is a file, upload it to the guest.
        if options["category"] == "file":
            try:
                file_data = open(options["target"], "rb").read()
            except (IOError, OSError) as e:
                raise CuckooGuestError("Unable to read {0}, error: "
                                       "{1}".format(options["target"], e))
            data = xmlrpclib.Binary(file_data)
            try:
                self.server.add_malware(data, options["file_name"])
            except Exception as e:
                raise CuckooGuestError("{0}: unable to upload malware to "
                                       "analysis machine: {1}".format(
                                           self.id, e))
        # Launch the analyzer.
        pid = self.server.execute()
        log.debug("%s: analyzer started with PID %d", self.id, pid)
    # If something goes wrong when establishing the connection, raise an
    # exception and abort the analysis.
    except (socket.timeout, socket.error):
        raise CuckooGuestError("{0}: guest communication timeout, check "
                               "networking or try to increase "
                               "timeout".format(self.id))

    # Custom: give the app some time to start up before simulating user
    # interaction.
    log.debug("Starting to simulate user interaction")
    time.sleep(10)
    self.simulateUserInteraction()
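# --- Hedged sketch (not part of the original file) ---
# simulateUserInteraction() is called above but not defined in this excerpt.
# One plausible implementation drives the Android UI exerciser (monkey) via
# adb; the self.package attribute below is a placeholder, not a value taken
# from this project.
def simulateUserInteraction(self):
    # Fire 250 pseudo-random UI events at the sample's package, throttled
    # to one event every 500 ms, using Android's monkey tool.
    subprocess.call([misc_config.ADB_PATH, "shell", "monkey",
                     "-p", self.package,  # hypothetical attribute
                     "--throttle", "500", "250"])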
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("target",
                        help="URL, path to the file or folder to analyze")
    parser.add_argument("-d", "--debug", action="store_true",
                        help="Enable debug logging")
    parser.add_argument("--remote", type=str, action="store", default=None,
                        help="Specify IP:port of a Cuckoo API server to "
                             "submit remotely",
                        required=False)
    parser.add_argument("--user", type=str, action="store", default=None,
                        help="Username for Basic Auth", required=False)
    parser.add_argument("--password", type=str, action="store", default=None,
                        help="Password for Basic Auth", required=False)
    parser.add_argument("--sslnoverify", action="store_true", default=False,
                        help="Do not validate SSL cert", required=False)
    parser.add_argument("--ssl", action="store_true", default=False,
                        help="Use SSL/TLS for remote", required=False)
    parser.add_argument("--url", action="store_true", default=False,
                        help="Specify whether the target is a URL",
                        required=False)
    parser.add_argument("--package", type=str, action="store", default="",
                        help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="",
                        help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0,
                        help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="",
                        help='Specify options for the analysis package '
                             '(e.g. "name=value,name2=value2")',
                        required=False)
    parser.add_argument("--priority", type=int, action="store", default=1,
                        help="Specify a priority for the analysis "
                             "represented by an integer",
                        required=False)
    parser.add_argument("--machine", type=str, action="store", default="",
                        help="Specify the identifier of a machine you want "
                             "to use",
                        required=False)
    parser.add_argument("--platform", type=str, action="store", default="",
                        help="Specify the operating system platform you want "
                             "to use (windows/darwin/linux)",
                        required=False)
    parser.add_argument("--memory", action="store_true", default=False,
                        help="Enable to take a memory dump of the analysis "
                             "machine",
                        required=False)
    parser.add_argument("--enforce-timeout", action="store_true",
                        default=False,
                        help="Enable to force the analysis to run for the "
                             "full timeout period",
                        required=False)
    parser.add_argument("--clock", type=str, action="store", default=None,
                        help="Set virtual machine clock", required=False)
    parser.add_argument("--tags", type=str, action="store", default=None,
                        help="Specify tags of the machine you want to use",
                        required=False)
    parser.add_argument("--max", type=int, action="store", default=None,
                        help="Maximum samples to add in a row",
                        required=False)
    parser.add_argument("--pattern", type=str, action="store", default=None,
                        help="Pattern of files to submit", required=False)
    parser.add_argument("--shuffle", action="store_true", default=False,
                        help="Shuffle samples before submitting them",
                        required=False)
    parser.add_argument("--unique", action="store_true", default=False,
                        help="Only submit new samples, ignore duplicates",
                        required=False)
    parser.add_argument("--quiet", action="store_true", default=False,
                        help="Only print text on failure", required=False)
    parser.add_argument("--procdump", action="store_true", default=False,
                        help="Dump, upload and process proc/memdumps",
                        required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    # If the quiet flag has been set, then we also disable the "warning"
    # level of the logging module. (E.g., when pydeep has not been
    # installed, there will be a warning message, because Cuckoo can't
    # resolve the ssdeep hash of this particular sample.)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig()
    if args.quiet:
        logging.disable(logging.WARNING)

    db = Database()

    target = to_unicode(args.target)

    # Cap the timeout at 24 hours.
    sane_timeout = min(args.timeout, 60 * 60 * 24)

    if args.procdump:
        # Append to any existing options rather than overwriting them.
        if args.options:
            args.options += ",procdump=1"
        else:
            args.options = "procdump=1"

    if args.url:
        if args.remote:
            if not HAVE_REQUESTS:
                print(bold(red("Error")) +
                      ": you need to install python-requests "
                      "(`pip3 install requests`)")
                return False
            if args.ssl:
                url = "https://{0}/tasks/create/url".format(args.remote)
            else:
                url = "http://{0}/tasks/create/url".format(args.remote)
            data = dict(
                url=target,
                package=args.package,
                timeout=sane_timeout,
                options=args.options,
                priority=args.priority,
                machine=args.machine,
                platform=args.platform,
                memory=args.memory,
                enforce_timeout=args.enforce_timeout,
                custom=args.custom,
                tags=args.tags,
            )
            try:
                if args.user and args.password:
                    if args.ssl:
                        verify = not args.sslnoverify
                        response = requests.post(
                            url, auth=(args.user, args.password),
                            data=data, verify=verify)
                    else:
                        response = requests.post(
                            url, auth=(args.user, args.password), data=data)
                else:
                    if args.ssl:
                        verify = not args.sslnoverify
                        response = requests.post(url, data=data,
                                                 verify=verify)
                    else:
                        response = requests.post(url, data=data)
            except Exception as e:
                print(bold(red("Error")) +
                      ": unable to send URL: {0}".format(e))
                return False
            resp = response.json()
            task_id = resp["task_id"]
        else:
            task_id = db.add_url(
                target,
                package=args.package,
                timeout=sane_timeout,
                options=args.options,
                priority=args.priority,
                machine=args.machine,
                platform=args.platform,
                custom=args.custom,
                memory=args.memory,
                enforce_timeout=args.enforce_timeout,
                clock=args.clock,
                tags=args.tags,
            )
        if task_id:
            if not args.quiet:
                print(bold(green("Success")) +
                      ': URL "{0}" added as task with ID {1}'.format(
                          target, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    else:
        # Get the absolute path to deal with relative targets.
        path = to_unicode(os.path.abspath(target))
        if not os.path.exists(path):
            print(bold(red("Error")) +
                  ': the specified file/folder does not exist at path '
                  '"{0}"'.format(path))
            return False

        files = []
        if os.path.isdir(path):
            for dirname, _, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)
                    if os.path.isfile(file_path):
                        if args.pattern:
                            if fnmatch.fnmatch(file_name, args.pattern):
                                files.append(to_unicode(file_path))
                        else:
                            files.append(to_unicode(file_path))
        else:
            files.append(path)

        if args.shuffle:
            random.shuffle(files)
        else:
            files = sorted(files)

        for file_path in files:
            if not File(file_path).get_size():
                if not args.quiet:
                    print(bold(yellow("Empty")) +
                          ": sample {0} (skipping file)".format(file_path))
                continue
            if args.max is not None:
                # Break if the maximum number of samples has been reached.
                if not args.max:
                    break
                args.max -= 1
            if args.remote:
                if not HAVE_REQUESTS:
                    print(bold(red("Error")) +
                          ": you need to install python-requests "
                          "(`pip3 install requests`)")
                    return False
                if args.ssl:
                    url = "https://{0}/tasks/create/file".format(args.remote)
                else:
                    url = "http://{0}/tasks/create/file".format(args.remote)
                # Use a distinct name here; rebinding "files" would shadow
                # the list of samples being iterated over.
                multipart = dict(file=open(file_path, "rb"),
                                 filename=os.path.basename(file_path))
                data = dict(
                    package=args.package,
                    timeout=sane_timeout,
                    options=args.options,
                    priority=args.priority,
                    machine=args.machine,
                    platform=args.platform,
                    memory=args.memory,
                    enforce_timeout=args.enforce_timeout,
                    custom=args.custom,
                    tags=args.tags,
                )
                try:
                    if args.user and args.password:
                        if args.ssl:
                            verify = not args.sslnoverify
                            response = requests.post(
                                url, auth=(args.user, args.password),
                                files=multipart, data=data, verify=verify)
                        else:
                            response = requests.post(
                                url, auth=(args.user, args.password),
                                files=multipart, data=data)
                    else:
                        if args.ssl:
                            verify = not args.sslnoverify
                            response = requests.post(url, files=multipart,
                                                     data=data, verify=verify)
                        else:
                            response = requests.post(url, files=multipart,
                                                     data=data)
                except Exception as e:
                    print(bold(red("Error")) +
                          ": unable to send file: {0}".format(e))
                    return False
                resp = response.json()
                # The API returns the list of created task IDs.
                task_ids = resp.get("task_ids", [])
            else:
                if args.unique and db.check_file_uniq(
                        File(file_path).get_sha256()):
                    msg = ": Sample {0} (skipping file)".format(file_path)
                    if not args.quiet:
                        print(bold(yellow("Duplicate")) + msg)
                    continue
                try:
                    tmp_path = store_temp_file(
                        open(file_path, "rb").read(),
                        sanitize_filename(os.path.basename(file_path)))
                    task_ids, extra_details = db.demux_sample_and_add_to_db(
                        file_path=tmp_path,
                        package=args.package,
                        timeout=sane_timeout,
                        options=args.options,
                        priority=args.priority,
                        machine=args.machine,
                        platform=args.platform,
                        memory=args.memory,
                        custom=args.custom,
                        enforce_timeout=args.enforce_timeout,
                        clock=args.clock,
                        tags=args.tags,
                    )
                except CuckooDemuxError as e:
                    task_ids = []
                    print(bold(red("Error")) + ": {0}".format(e))
            tasks_count = len(task_ids)
            if tasks_count > 1:
                if not args.quiet:
                    print(bold(green("Success")) +
                          ': File "{0}" added as task with IDs {1}'.format(
                              file_path, task_ids))
            elif tasks_count > 0:
                if not args.quiet:
                    print(bold(green("Success")) +
                          ': File "{0}" added as task with ID {1}'.format(
                              file_path, task_ids[0]))
            else:
                print(bold(red("Error")) + ": adding task to database")
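# --- Example invocations (the script path utils/submit.py is an assumption;
# every flag below is defined by the parser above) ---
#
#   python3 utils/submit.py /tmp/sample.exe
#   python3 utils/submit.py --url --timeout 120 http://example.com/payload
#   python3 utils/submit.py --remote 192.168.1.5:8090 --pattern "*.exe" /tmp/samples/
#
# The last form walks /tmp/samples/, submitting every file matching the
# pattern to the remote API at 192.168.1.5:8090.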
def start_analysis(self, options):
    """Start analysis.
    @param options: options.
    @return: operation status.
    """
    log.info("Starting analysis on guest (id=%s, ip=%s)", self.id, self.ip)

    # TODO: deal with unicode URLs.
    if options["category"] == "file":
        # Quote the sanitized file name so the guest handles spaces and
        # special characters; the quotes are stripped again before upload.
        options["file_name"] = "'" + sanitize_filename(
            options["file_name"]) + "'"

    self.timeout = options["timeout"] + self.cfg.timeouts.critical
    # Get and set the dynamically generated resultserver port.
    options["port"] = str(ResultServer().port)

    url = "http://{0}:{1}".format(self.ip, CUCKOO_GUEST_PORT)
    self.server = TimeoutServer(url, allow_none=True, timeout=self.timeout)

    try:
        # Wait for the agent to respond. This is done to check the
        # availability of the agent and verify that it's ready to receive
        # data.
        self.wait(CUCKOO_GUEST_INIT)
        # Invoke the upload of the analyzer to the guest.
        self.upload_analyzer()
        # Give the analysis options to the guest, so it can generate the
        # analysis.conf inside the guest.
        try:
            self.server.add_config(options)
        except Exception:
            raise CuckooGuestError("{0}: unable to upload config to "
                                   "analysis machine".format(self.id))
        # If the target of the analysis is a file, upload it to the guest.
        if options["category"] == "file":
            try:
                file_data = open(options["target"], "rb").read()
            except (IOError, OSError) as e:
                raise CuckooGuestError("Unable to read {0}, error: "
                                       "{1}".format(options["target"], e))
            data = xmlrpclib.Binary(file_data)
            try:
                # Strip off the added surrounding quotes.
                self.server.add_malware(data, options["file_name"][1:-1])
            except Exception as e:
                raise CuckooGuestError("{0}: unable to upload malware to "
                                       "analysis machine: {1}".format(
                                           self.id, e))
        # Check for support files and upload them to the guest.
        self.upload_support_files(options)

        # Debug hook: setting CUCKOO_DBG in the environment parks the host
        # side here so analyzer.py can be debugged inside the VM by hand.
        if "CUCKOO_DBG" in os.environ:
            while True:
                pass

        # Launch the analyzer.
        pid = self.server.execute()
        log.debug("%s: analyzer started with PID %d", self.id, pid)
    # If something goes wrong when establishing the connection, raise an
    # exception and abort the analysis.
    except (socket.timeout, socket.error):
        raise CuckooGuestError("{0}: guest communication timeout, check "
                               "networking or try to increase "
                               "timeout".format(self.id))
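# --- Hedged sketch (not part of the original file) ---
# TimeoutServer above is assumed to be an xmlrpclib.ServerProxy that applies
# a socket timeout to its HTTP connection, so RPC calls to a dead guest
# cannot hang forever. A minimal version of that pattern (classic Cuckoo
# shipped something similar; the details here are illustrative):
import xmlrpclib  # xmlrpc.client on Python 3


class TimeoutTransport(xmlrpclib.Transport):
    def __init__(self, timeout=None, *args, **kwargs):
        xmlrpclib.Transport.__init__(self, *args, **kwargs)
        self.timeout = timeout

    def make_connection(self, host):
        # Build the HTTPConnection as usual, then set its timeout before
        # the socket is opened.
        conn = xmlrpclib.Transport.make_connection(self, host)
        if self.timeout is not None:
            conn.timeout = self.timeout
        return conn


class TimeoutServer(xmlrpclib.ServerProxy):
    def __init__(self, *args, **kwargs):
        timeout = kwargs.pop("timeout", None)
        kwargs["transport"] = TimeoutTransport(timeout=timeout)
        xmlrpclib.ServerProxy.__init__(self, *args, **kwargs)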
def start_analysis(self, options):
    """Start analysis.
    @param options: options.
    @return: operation status.
    """
    log.info("Starting analysis on guest (id=%s, ip=%s)", self.id, self.ip)

    # TODO: deal with unicode URLs.
    if options["category"] == "file":
        # Quote the sanitized file name; the quotes are stripped again
        # before the upload below.
        options["file_name"] = "'" + sanitize_filename(
            options["file_name"]) + "'"

    self.timeout = options["timeout"] + self.cfg.timeouts.critical
    # Get and set the dynamically generated resultserver port.
    options["port"] = str(ResultServer().port)

    url = "http://{0}:{1}".format(self.ip, CUCKOO_GUEST_PORT)
    self.server = TimeoutServer(url, allow_none=True, timeout=self.timeout)

    try:
        # Wait for the agent to respond. This is done to check the
        # availability of the agent and verify that it's ready to receive
        # data.
        self.wait(CUCKOO_GUEST_INIT)
        # Invoke the upload of the analyzer to the guest.
        self.upload_analyzer()
        # Give the analysis options to the guest, so it can generate the
        # analysis.conf inside the guest.
        try:
            self.server.add_config(options)
        except Exception:
            raise CuckooGuestError("{0}: unable to upload config to "
                                   "analysis machine".format(self.id))
        # If the target of the analysis is a file, upload it to the guest.
        if options["category"] == "file":
            try:
                file_data = open(options["target"], "rb").read()
            except (IOError, OSError) as e:
                raise CuckooGuestError("Unable to read {0}, error: "
                                       "{1}".format(options["target"], e))
            data = xmlrpclib.Binary(file_data)
            try:
                # Strip off the added surrounding quotes.
                self.server.add_malware(data, options["file_name"][1:-1])
            except Exception as e:
                raise CuckooGuestError("{0}: unable to upload malware to "
                                       "analysis machine: {1}".format(
                                           self.id, e))
        # Launch the analyzer.
        pid = self.server.execute()
        log.debug("%s: analyzer started with PID %d", self.id, pid)
    # If something goes wrong when establishing the connection, raise an
    # exception and abort the analysis.
    except (socket.timeout, socket.error):
        raise CuckooGuestError("{0}: guest communication timeout, check "
                               "networking or try to increase "
                               "timeout".format(self.id))
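# --- Hedged sketch (not part of the original file) ---
# wait() is used by every start_analysis() variant above but not shown. In
# classic Cuckoo it polls the agent's XML-RPC get_status() until the guest
# reports the requested state or the critical timeout expires; the loop
# below is a simplified illustration of that pattern, not the exact original.
def wait(self, status):
    log.debug("%s: waiting for status 0x%.04x", self.id, status)
    end = time.time() + self.timeout
    while True:
        if time.time() > end:
            raise CuckooGuestError("{0}: the guest initialization hit the "
                                   "critical timeout, analysis "
                                   "aborted".format(self.id))
        try:
            if self.server.get_status() == status:
                log.debug("%s: status ready", self.id)
                break
        except Exception:
            # The agent may not be listening yet; keep polling.
            pass
        log.debug("%s: not ready yet", self.id)
        time.sleep(1)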