def parse_request_arguments(request):
    """Read the analysis-submission parameters out of a POST request.

    Returns the tuple (static, package, timeout, priority, options, machine,
    platform, tags, custom, memory, clock, enforce_timeout, shrike_url,
    shrike_msg, shrike_sid, shrike_refer, unique, referrer, tlp).
    """
    post = request.POST

    static = post.get("static", "")
    referrer = validate_referrer(post.get("referrer", None))
    package = post.get("package", "")
    timeout = force_int(post.get("timeout"))
    priority = force_int(post.get("priority"))
    options = post.get("options", "")
    machine = post.get("machine", "")
    platform = post.get("platform", "")
    tags = post.get("tags", None)
    custom = post.get("custom", "")
    memory = bool(post.get("memory", False))

    # Fall back to "now" when the submitted clock is absent, empty, or an
    # epoch ("1970") placeholder.
    clock = post.get("clock", datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
    if not clock or "1970" in clock:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")

    enforce_timeout = bool(post.get("enforce_timeout", False))
    shrike_url = post.get("shrike_url", None)
    shrike_msg = post.get("shrike_msg", None)
    shrike_sid = post.get("shrike_sid", None)
    shrike_refer = post.get("shrike_refer", None)
    unique = bool(post.get("unique", False))
    tlp = post.get("tlp", None)

    # Linux options, when supplied, replace the generic options string.
    lin_options = post.get("lin_options", "")
    if lin_options:
        options = lin_options

    return (static, package, timeout, priority, options, machine, platform,
            tags, custom, memory, clock, enforce_timeout, shrike_url,
            shrike_msg, shrike_sid, shrike_refer, unique, referrer, tlp)
def index(request, resubmit_hash=False):
    """Submission view: on POST, create analysis tasks from an uploaded
    sample / quarantine file / PCAP / URL / VirusTotal hash / resubmitted
    hash and show the completion page; on GET, render the submission form.

    ``resubmit_hash`` is passed through to the template to pre-populate the
    form when resubmitting a previously-seen sample.
    """
    if request.method == "POST":
        # --- Collect and normalize form fields --------------------------
        package = request.POST.get("package", "")
        # Cap the analysis timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)
        # Pull an explicit "filename=..." entry out of the options string.
        opt_filename = ""
        for option in options.split(","):
            if option.startswith("filename="):
                opt_filename = option.split("filename=")[1]
                break
        task_gateways = []
        # Matches a dotted-quad IPv4 address.
        ipaddy_re = re.compile(
            r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
        )
        # --- Translate form checkboxes into comma-separated options -----
        if referrer:
            if options:
                options += ","
            options += "referrer=%s" % (referrer)
        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"
        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"
        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"
        if request.POST.get("route", None):
            if options:
                options += ","
            options += "route={0}".format(request.POST.get("route", None))
        # NOTE(review): the checkbox semantics look inverted here — ticking
        # "process_dump" sets procdump=0 while leaving it unticked sets
        # procdump=1. Confirm against the submission template.
        if request.POST.get("process_dump"):
            if options:
                options += ","
            options += "procdump=0"
        else:
            if options:
                options += ","
            options += "procdump=1"
        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=1"
        if request.POST.get("import_reconstruction"):
            if options:
                options += ","
            options += "import_reconstruction=1"
        if request.POST.get("disable_cape"):
            if options:
                options += ","
            options += "disable_cape=1"
        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"
        if request.POST.get("norefer"):
            if options:
                options += ","
            options += "norefer=1"
        # Keep a pristine copy; per-gateway options are rebuilt from it.
        orig_options = options
        # --- Resolve the requested gateway(s) ---------------------------
        if gateway and gateway.lower() == "all":
            for e in settings.GATEWAYS:
                if ipaddy_re.match(settings.GATEWAYS[e]):
                    task_gateways.append(settings.GATEWAYS[e])
        elif gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                if request.POST.get("all_gw_in_group"):
                    tgateway = settings.GATEWAYS[gateway].split(",")
                    for e in tgateway:
                        task_gateways.append(settings.GATEWAYS[e])
                else:
                    # Pick one gateway of the group at random.
                    tgateway = random.choice(
                        settings.GATEWAYS[gateway].split(","))
                    task_gateways.append(settings.GATEWAYS[tgateway])
            else:
                task_gateways.append(settings.GATEWAYS[gateway])
        if not task_gateways:
            # To reduce to the default case
            task_gateways = [None]
        db = Database()
        task_ids = []
        task_machines = []
        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)
        status = "ok"
        # --- Resubmission of a previously-seen hash ---------------------
        if "hash" in request.POST and request.POST.get(
                "hash", False) and request.POST.get("hash")[0] != '':
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            # NOTE(review): under Python 3 filter() returns a lazy iterator,
            # which is always truthy, so the "if not paths" fallback below
            # would never trigger — confirm the targeted Python version.
            paths = filter(
                None,
                [path if os.path.exists(path) else False for path in paths])
            if not paths and FULL_DB:
                tasks = results_db.analysis.find(
                    {"dropped.sha256": resubmission_hash})
                if tasks:
                    for task in tasks:
                        # grab task id and replace in path aka distributed cuckoo hack
                        path = os.path.join(settings.CUCKOO_PATH, "storage",
                                            "analyses",
                                            str(task["info"]["id"]), "files",
                                            resubmission_hash)
                        if os.path.exists(path):
                            paths = [path]
                            break
            if paths:
                content = ""
                content = submit_utils.get_file_content(paths)
                if content is False:
                    return render(request, "error.html", {
                        "error":
                        "Can't find {} on disk".format(resubmission_hash)
                    })
                base_dir = tempfile.mkdtemp(prefix='resubmit_',
                                            dir=settings.TEMP_PATH)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + resubmission_hash
                path = store_temp_file(content, filename)
                headers = {}
                url = 'local'
                params = {}
                status, task_ids = download_file(
                    content, request, db, task_ids, url, params, headers,
                    "Local", path, package, timeout, options, priority,
                    machine, gateway, clock, custom, memory, enforce_timeout,
                    referrer, tags, orig_options, task_gateways,
                    task_machines)
        # --- Plain file upload ------------------------------------------
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)
                # One task per (gateway, machine) combination.
                for gw in task_gateways:
                    options = update_options(gw, orig_options)
                    for entry in task_machines:
                        try:
                            task_ids_new = db.demux_sample_and_add_to_db(
                                file_path=path,
                                package=package,
                                timeout=timeout,
                                options=options,
                                priority=priority,
                                machine=entry,
                                custom=custom,
                                memory=memory,
                                enforce_timeout=enforce_timeout,
                                tags=tags,
                                clock=clock)
                            task_ids.extend(task_ids_new)
                        except CuckooDemuxError as err:
                            return render(request, "error.html",
                                          {"error": err})
        # --- Quarantine file upload -------------------------------------
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                # NOTE(review): bare except silently ignores any cleanup error.
                except:
                    pass
                if not path:
                    return render(request, "error.html", {
                        "error": "You uploaded an unsupported quarantine file."
                    })
                for gw in task_gateways:
                    options = update_options(gw, orig_options)
                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock)
                        task_ids.extend(task_ids_new)
        # --- PCAP upload -------------------------------------------------
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)
                # Fiddler .saz archives are converted to PCAP first.
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except:
                            pass
                        path = saz
                    else:
                        return render(
                            request, "error.html",
                            {"error": "Conversion from SAZ to PCAP failed."})
                task_id = db.add_pcap(file_path=path, priority=priority)
                task_ids.append(task_id)
        # --- URL submission ----------------------------------------------
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})
            # Re-arm common defanged notations (hxxp://, [.]).
            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")
            for gw in task_gateways:
                options = update_options(gw, orig_options)
                for entry in task_machines:
                    task_id = db.add_url(url=url,
                                         package=package,
                                         timeout=timeout,
                                         options=options,
                                         priority=priority,
                                         machine=entry,
                                         custom=custom,
                                         memory=memory,
                                         enforce_timeout=enforce_timeout,
                                         tags=tags,
                                         clock=clock)
                    if task_id:
                        task_ids.append(task_id)
        # --- Download from VirusTotal by hash ---------------------------
        elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get(
                "vtdl", False) and request.POST.get("vtdl")[0] != '':
            vtdl = request.POST.get("vtdl")
            if (not settings.VTDL_PRIV_KEY
                    and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(
                    request, "error.html", {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                    })
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',
                                            dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.replace(" ", "").strip().split(",")
                else:
                    hashlist = vtdl.split()
                for h in hashlist:
                    if opt_filename:
                        filename = base_dir + "/" + opt_filename
                    else:
                        filename = base_dir + "/" + h
                    # Prefer a copy already on disk over re-downloading.
                    paths = db.sample_path_by_hash(h)
                    content = ""
                    if paths is not None:
                        content = submit_utils.get_file_content(paths)
                    headers = {}
                    url = 'https://www.virustotal.com/intelligence/download/'
                    params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}
                    # Not on disk: fetch from VT; a private API key takes
                    # precedence over the intelligence key.
                    if content is False:
                        if settings.VTDL_PRIV_KEY:
                            url = 'https://www.virustotal.com/vtapi/v2/file/download'
                            params = {
                                'apikey': settings.VTDL_PRIV_KEY,
                                'hash': h
                            }
                        status, task_ids = download_file(
                            content, request, db, task_ids, url, params,
                            headers, "VirusTotal", filename, package, timeout,
                            options, priority, machine, gateway, clock,
                            custom, memory, enforce_timeout, referrer, tags,
                            orig_options, task_gateways, task_machines)
                    else:
                        status, task_ids = download_file(
                            content, request, db, task_ids, url, params,
                            headers, "Local", filename, package, timeout,
                            options, priority, machine, gateway, clock,
                            custom, memory, enforce_timeout, referrer, tags,
                            orig_options, task_gateways, task_machines)
        # On error download_file() returns a rendered response in task_ids.
        if status == "error":
            # is render msg
            return task_ids
        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render(request, "submission/complete.html", {
                "tasks": task_ids,
                "tasks_count": tasks_count
            })
        else:
            return render(request, "error.html",
                          {"error": "Error adding task to Cuckoo's database."})
    else:
        # --- GET: build the context for the submission form -------------
        cfg = Config("cuckoo")
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get(
            "enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [
            x.strip() for x in getattr(Config(machinery), machinery).get(
                "machines").split(",")
        ]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True
        # Available analysis packages come from the analyzer's modules dir.
        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))
        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue
            packages.append(name)
        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)
            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label
            machines.append((machine.label, label))
        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))
        return render(
            request, "submission/index.html", {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": vpns.values(),
                "route": cfg.routing.route,
                "internet": cfg.routing.internet,
                "inetsim": cfg.routing.inetsim,
                "tor": cfg.routing.tor,
                "gateways": settings.GATEWAYS,
                "config": enabledconf,
                "resubmit": resubmit_hash,
            })
def index(request):
    """Submission view (legacy variant, no resubmission or static-analysis
    support): on POST, create analysis tasks from an uploaded sample /
    quarantine file / PCAP / URL / VirusTotal hash and show the completion
    page; on GET, render the submission form.
    """
    if request.method == "POST":
        # --- Collect and normalize form fields --------------------------
        package = request.POST.get("package", "")
        # Cap the analysis timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)
        task_gateways = []
        # Matches a dotted-quad IPv4 address.
        ipaddy_re = re.compile(
            r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
        )
        # --- Translate form checkboxes into comma-separated options -----
        if referrer:
            if options:
                options += ","
            options += "referrer=%s" % (referrer)
        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"
        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"
        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"
        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"
        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"
        # Keep a pristine copy; per-gateway options are rebuilt from it.
        orig_options = options
        # --- Resolve the requested gateway(s) ---------------------------
        if gateway and gateway.lower() == "all":
            for e in settings.GATEWAYS:
                if ipaddy_re.match(settings.GATEWAYS[e]):
                    task_gateways.append(settings.GATEWAYS[e])
        elif gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                if request.POST.get("all_gw_in_group"):
                    tgateway = settings.GATEWAYS[gateway].split(",")
                    for e in tgateway:
                        task_gateways.append(settings.GATEWAYS[e])
                else:
                    # Pick one gateway of the group at random.
                    tgateway = random.choice(
                        settings.GATEWAYS[gateway].split(","))
                    task_gateways.append(settings.GATEWAYS[tgateway])
            else:
                task_gateways.append(settings.GATEWAYS[gateway])
        if not task_gateways:
            # To reduce to the default case
            task_gateways = [None]
        db = Database()
        task_ids = []
        task_machines = []
        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)
        # --- Plain file upload ------------------------------------------
        if "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)
                # One task per (gateway, machine) combination.
                for gw in task_gateways:
                    options = update_options(gw, orig_options)
                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock)
                        task_ids.extend(task_ids_new)
        # --- Quarantine file upload -------------------------------------
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                # NOTE(review): bare except silently ignores any cleanup error.
                except:
                    pass
                if not path:
                    return render(request, "error.html", {
                        "error": "You uploaded an unsupported quarantine file."
                    })
                for gw in task_gateways:
                    options = update_options(gw, orig_options)
                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock)
                        task_ids.extend(task_ids_new)
        # --- PCAP upload -------------------------------------------------
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)
                # Fiddler .saz archives are converted to PCAP first.
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except:
                            pass
                        path = saz
                    else:
                        return render(
                            request, "error.html",
                            {"error": "Conversion from SAZ to PCAP failed."})
                task_id = db.add_pcap(file_path=path, priority=priority)
                task_ids.append(task_id)
        # --- URL submission ----------------------------------------------
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})
            # Re-arm common defanged notations (hxxp://, [.]).
            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")
            for gw in task_gateways:
                options = update_options(gw, orig_options)
                for entry in task_machines:
                    task_id = db.add_url(url=url,
                                         package=package,
                                         timeout=timeout,
                                         options=options,
                                         priority=priority,
                                         machine=entry,
                                         custom=custom,
                                         memory=memory,
                                         enforce_timeout=enforce_timeout,
                                         tags=tags,
                                         clock=clock)
                    if task_id:
                        task_ids.append(task_id)
        # --- Download from VirusTotal by hash ---------------------------
        elif settings.VTDL_ENABLED and "vtdl" in request.POST:
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY
                    and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(
                    request, "error.html", {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                    })
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',
                                            dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.split(",")
                else:
                    hashlist.append(vtdl)
                onesuccess = False
                for h in hashlist:
                    filename = base_dir + "/" + h
                    # A private API key takes precedence over the
                    # intelligence key.
                    if settings.VTDL_PRIV_KEY:
                        url = 'https://www.virustotal.com/vtapi/v2/file/download'
                        params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
                    else:
                        url = 'https://www.virustotal.com/intelligence/download/'
                        params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}
                    try:
                        r = requests.get(url, params=params, verify=True)
                    except requests.exceptions.RequestException as e:
                        return render(
                            request, "error.html", {
                                "error":
                                "Error completing connection to VirusTotal: {0}"
                                .format(e)
                            })
                    if r.status_code == 200:
                        # NOTE(review): if f.write() raises, the handle leaks
                        # (no with/finally) — bare except also hides the cause.
                        try:
                            f = open(filename, 'wb')
                            f.write(r.content)
                            f.close()
                        except:
                            return render(
                                request, "error.html", {
                                    "error":
                                    "Error writing VirusTotal download file to temporary path"
                                })
                        onesuccess = True
                        for gw in task_gateways:
                            options = update_options(gw, orig_options)
                            for entry in task_machines:
                                task_ids_new = db.demux_sample_and_add_to_db(
                                    file_path=filename,
                                    package=package,
                                    timeout=timeout,
                                    options=options,
                                    priority=priority,
                                    machine=entry,
                                    custom=custom,
                                    memory=memory,
                                    enforce_timeout=enforce_timeout,
                                    tags=tags,
                                    clock=clock)
                                task_ids.extend(task_ids_new)
                    elif r.status_code == 403:
                        return render(
                            request, "error.html", {
                                "error":
                                "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"
                            })
                if not onesuccess:
                    return render(
                        request, "error.html",
                        {"error": "Provided hash not found on VirusTotal"})
        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render(request, "submission/complete.html", {
                "tasks": task_ids,
                "tasks_count": tasks_count
            })
        else:
            return render(request, "error.html",
                          {"error": "Error adding task to Cuckoo's database."})
    else:
        # --- GET: build the context for the submission form -------------
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get(
            "enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [
            x.strip() for x in getattr(Config(machinery), machinery).get(
                "machines").split(",")
        ]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True
        # Available analysis packages come from the analyzer's modules dir.
        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))
        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue
            packages.append(name)
        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)
            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label
            machines.append((machine.label, label))
        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))
        return render(
            request, "submission/index.html", {
                "packages": sorted(packages),
                "machines": machines,
                "gateways": settings.GATEWAYS,
                "config": enabledconf
            })
def index(request, resubmit_hash=False): if request.method == "POST": package = request.POST.get("package", "") timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24) options = request.POST.get("options", "") lin_options = request.POST.get("lin_options", "") priority = force_int(request.POST.get("priority")) machine = request.POST.get("machine", "") clock = request.POST.get( "clock", datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")) if not clock: clock = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S") if "1970" in clock: clock = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S") custom = request.POST.get("custom", "") memory = bool(request.POST.get("memory", False)) enforce_timeout = bool(request.POST.get("enforce_timeout", False)) referrer = validate_referrer(request.POST.get("referrer", None)) tags = request.POST.get("tags", None) static = bool(request.POST.get("static", False)) all_tags = load_vms_tags() if tags and not all( [tag.strip() in all_tags for tag in tags.split(",")]): return render(request, "error.html", { "error": "Check Tags help, you have introduced incorrect tag(s)" }) if lin_options: options = lin_options # This is done to remove spaces in options but not breaks custom paths options = ','.join('='.join(value.strip() for value in option.split("=", 1)) for option in options.split(",") if option and '=' in option) opt_filename = get_user_filename(options, custom) if options: options += "," if referrer: options += "referrer=%s," % (referrer) if request.POST.get("free"): options += "free=yes," if request.POST.get("nohuman"): options += "nohuman=yes," if request.POST.get("tor"): options += "tor=yes," if request.POST.get("route", None): options += "route={0},".format(request.POST.get("route", None)) if request.POST.get("process_dump"): options += "procdump=0," if request.POST.get("process_memory"): options += "procmemdump=1," if request.POST.get("import_reconstruction"): options += "import_reconstruction=1," if 
request.POST.get("disable_cape"): options += "disable_cape=1," if request.POST.get("kernel_analysis"): options += "kernel_analysis=yes," if request.POST.get("norefer"): options += "norefer=1," if request.POST.get("oldloader"): options += "loader=oldloader.exe,loader_64=oldloader_x64.exe," if request.POST.get("unpack"): options += "unpack=yes," options = options[:-1] unique = request.POST.get("unique", False) orig_options = options task_ids = [] task_machines = [] status = "ok" failed_hashes = list() task_ids_tmp = list() if "hash" in request.POST and request.POST.get( "hash", False) and request.POST.get("hash")[0] != '': resubmission_hash = request.POST.get("hash").strip() paths = db.sample_path_by_hash(resubmission_hash) if paths: paths = [ _f for _f in [ path if os.path.exists(path) else False for path in paths ] if _f ] if not paths and FULL_DB: tasks = results_db.analysis.find( {"dropped.sha256": resubmission_hash}, { "info.id": 1, "_id": 0 }) if tasks: for task in tasks or []: # grab task id and replace in path if needed aka distributed hack path = os.path.join(settings.CUCKOO_PATH, "storage", "analyses", str(task["info"]["id"]), "files", resubmission_hash) if os.path.exists(path): paths = [path] break if paths: content = False content = get_file_content(paths) if not content: return render( request, "error.html", { "error": "Can't find {} on disk, {}".format( resubmission_hash, str(paths)) }) base_dir = tempfile.mkdtemp(prefix='resubmit_', dir=settings.TEMP_PATH) if opt_filename: filename = base_dir + "/" + opt_filename else: filename = base_dir + "/" + sanitize_filename( resubmission_hash) path = store_temp_file(content, filename) headers = {} url = 'local' params = {} status, task_ids = download_file( False, content, request, db, task_ids, url, params, headers, "Local", path, package, timeout, options, priority, machine, clock, custom, memory, enforce_timeout, referrer, tags, orig_options, "", static) else: return render( request, "error.html", {"error": 
"File not found on hdd for resubmission"}) elif "sample" in request.FILES: samples = request.FILES.getlist("sample") for sample in samples: # Error if there was only one submitted sample and it's empty. # But if there are multiple and one was empty, just ignore it. if not sample.size: if len(samples) != 1: continue return render(request, "error.html", {"error": "You uploaded an empty file."}) elif sample.size > settings.MAX_UPLOAD_SIZE: return render( request, "error.html", { "error": "You uploaded a file that exceeds the maximum allowed upload size " "specified in web/web/local_settings.py." }) if opt_filename: filename = opt_filename else: filename = sanitize_filename(sample.name) # Moving sample from django temporary file to Cuckoo temporary storage to # let it persist between reboot (if user like to configure it in that way). path = store_temp_file(sample.read(), filename) if unique and db.check_file_uniq(File(path).get_sha256()): return render( request, "error.html", { "error": "Duplicated file, disable unique option to force submission" }) magic_type = get_magic_type(path) if disable_x64 is True: if magic_type and ("x86-64" in magic_type or "PE32+" in magic_type): if len(samples) == 1: return render( request, "error.html", {"error": "Sorry no x64 support yet"}) else: continue orig_options, timeout, enforce_timeout = recon( path, orig_options, timeout, enforce_timeout) platform = get_platform(magic_type) if machine.lower() == "all": task_machines = [ vm.name for vm in db.list_machines(platform=platform) ] elif machine: machine_details = db.view_machine(machine) if hasattr(machine_details, "platform" ) and not machine_details.platform == platform: return render( request, "error.html", { "error": "Wrong platform, {} VM selected for {} sample". 
format(machine_details.platform, platform) }) else: task_machines = [machine] else: task_machines = ["first"] for entry in task_machines: if entry == "first": entry = None try: task_ids_new = db.demux_sample_and_add_to_db( file_path=path, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, platform=platform, enforce_timeout=enforce_timeout, tags=tags, clock=clock, static=static) task_ids.extend(task_ids_new) except CuckooDemuxError as err: return render(request, "error.html", {"error": err}) elif "quarantine" in request.FILES: samples = request.FILES.getlist("quarantine") for sample in samples: # Error if there was only one submitted sample and it's empty. # But if there are multiple and one was empty, just ignore it. if not sample.size: if len(samples) != 1: continue return render( request, "error.html", {"error": "You uploaded an empty quarantine file."}) elif sample.size > settings.MAX_UPLOAD_SIZE: return render( request, "error.html", { "error": "You uploaded a quarantine file that exceeds the maximum \ allowed upload size specified in web/web/local_settings.py." }) # Moving sample from django temporary file to Cuckoo temporary storage to # let it persist between reboot (if user like to configure it in that way). tmp_path = store_temp_file(sample.read(), sample.name) path = unquarantine(tmp_path) try: os.remove(tmp_path) except Exception as e: pass if not path: return render(request, "error.html", { "error": "You uploaded an unsupported quarantine file." 
}) if machine.lower() == "all": task_machines = [ vm.name for vm in db.list_machines(platform="windows") ] elif machine: machine_details = db.view_machine(machine) if not machine_details.platform == "windows": return render( request, "error.html", { "error": "Wrong platform, linux VM selected for {} sample" .format(machine_details.platform) }) else: task_machines = [machine] if not task_machines: task_machines = ["first"] for entry in task_machines: if entry == "first": entry = None task_ids_new = db.demux_sample_and_add_to_db( file_path=path, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, tags=tags, enforce_timeout=enforce_timeout, clock=clock) if task_ids_new: task_ids.extend(task_ids_new) elif "static" in request.FILES: samples = request.FILES.getlist("static") for sample in samples: if not sample.size: if len(samples) != 1: continue return render(request, "error.html", {"error": "You uploaded an empty file."}) elif sample.size > settings.MAX_UPLOAD_SIZE: return render( request, "error.html", { "error": "You uploaded a file that exceeds the maximum \ allowed upload size specified in web/web/local_settings.py." }) # Moving sample from django temporary file to Cuckoo temporary storage to # let it persist between reboot (if user like to configure it in that way). 
path = store_temp_file(sample.read(), sample.name) task_id = db.add_static(file_path=path, priority=priority) if not task_id: return render( request, "error.html", {"error": "We don't have static extractor for this"}) task_ids.append(task_id) elif "pcap" in request.FILES: samples = request.FILES.getlist("pcap") for sample in samples: if not sample.size: if len(samples) != 1: continue return render( request, "error.html", {"error": "You uploaded an empty PCAP file."}) elif sample.size > settings.MAX_UPLOAD_SIZE: return render( request, "error.html", { "error": "You uploaded a PCAP file that exceeds the maximum \ allowed upload size specified in web/web/local_settings.py." }) # Moving sample from django temporary file to Cuckoo temporary storage to # let it persist between reboot (if user like to configure it in that way). path = store_temp_file(sample.read(), sample.name) if sample.name.lower().endswith(".saz"): saz = saz_to_pcap(path) if saz: try: os.remove(path) except Exception as e: pass path = saz else: return render( request, "error.html", {"error": "Conversion from SAZ to PCAP failed."}) task_id = db.add_pcap(file_path=path, priority=priority) if task_id: task_ids.append(task_id) elif "url" in request.POST and request.POST.get("url").strip(): url = request.POST.get("url").strip() if not url: return render(request, "error.html", {"error": "You specified an invalid URL!"}) url = url.replace("hxxps://", "https://").replace( "hxxp://", "http://").replace("[.]", ".") if machine.lower() == "all": task_machines = [ vm.name for vm in db.list_machines(platform="windows") ] elif machine: machine_details = db.view_machine(machine) if not machine_details.platform == "windows": return render( request, "error.html", { "error": "Wrong platform, linux VM selected for {} sample". 
format(machine_details.platform) }) else: task_machines = [machine] else: task_machines = ["first"] for entry in task_machines: if entry == "first": entry = None task_ids_new = db.add_url(url=url, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock) if task_ids_new: task_ids.extend(task_ids_new) elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip(): url = request.POST.get("dlnexec").strip() if not url: return render(request, "error.html", {"error": "You specified an invalid URL!"}) url = url.replace("hxxps://", "https://").replace( "hxxp://", "http://").replace("[.]", ".") response = _download_file(request.POST.get("route", None), url, options) if not response: return render(request, "error.html", {"error": "Was impossible to retrieve url"}) name = os.path.basename(url) if not "." in name: name = get_user_filename(options, custom) or generate_fake_name() path = store_temp_file(response, name) magic_type = get_magic_type(path) platform = get_platform(magic_type) if machine.lower() == "all": task_machines = [ vm.name for vm in db.list_machines(platform=platform) ] elif machine: machine_details = db.view_machine(machine[0]) if not machine_details.platform == platform: return render( request, "error.html", { "error": "Wrong platform, {} VM selected for {} sample". 
format(machine_details.platform, platform) }) else: task_machines = [machine] else: task_machines = ["first"] for entry in task_machines: if entry == "first": entry = None task_ids_new = db.demux_sample_and_add_to_db( file_path=path, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, platform=platform, clock=clock) if task_ids_new: task_ids.extend(task_ids_new) elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get("vtdl", False) \ and request.POST.get("vtdl")[0] != '': vtdl = request.POST.get("vtdl").strip() if (not settings.VTDL_PRIV_KEY and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH: return render( request, "error.html", { "error": "You specified VirusTotal but must edit the file and specify your " "VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory" }) else: hashlist = [] if "," in vtdl: hashlist = [ _f for _f in vtdl.replace(" ", "").strip().split(",") if _f ] else: hashlist.append(vtdl) for h in hashlist: base_dir = tempfile.mkdtemp(prefix='cuckoovtdl', dir=settings.VTDL_PATH) task_ids_tmp = list() if opt_filename: filename = base_dir + "/" + opt_filename else: filename = base_dir + "/" + sanitize_filename(h) headers = {} paths = db.sample_path_by_hash(h) content = False if paths: content = get_file_content(paths) if settings.VTDL_PRIV_KEY: headers = {'x-apikey': settings.VTDL_PRIV_KEY} elif settings.VTDL_INTEL_KEY: headers = {'x-apikey': settings.VTDL_INTEL_KEY} url = "https://www.virustotal.com/api/v3/files/{id}/download".format( id=h) params = {} if not content: status, task_ids_tmp = download_file( False, content, request, db, task_ids, url, params, headers, "VirusTotal", filename, package, timeout, options, priority, machine, clock, custom, memory, enforce_timeout, referrer, tags, orig_options, "", static, h) else: status, task_ids_tmp = download_file( False, content, request, db, task_ids, url, 
params, headers, "Local", filename, package, timeout, options, priority, machine, clock, custom, memory, enforce_timeout, referrer, tags, orig_options, "", static, h) if status is "ok": task_ids = task_ids_tmp else: failed_hashes.append(h) if not isinstance(task_ids, list) and status == "error": # is render msg return task_ids if not isinstance(task_ids_tmp, list) and status == "error": # is render msg return task_ids_tmp if isinstance(task_ids, list): tasks_count = len(task_ids) else: # ToDo improve error msg tasks_count = 0 tasks_count = len(task_ids) if tasks_count > 0: data = {"tasks": task_ids, "tasks_count": tasks_count} if failed_hashes: data["failed_hashes"] = failed_hashes return render(request, "submission/complete.html", data) else: return render(request, "error.html", {"error": "Error adding task to Cuckoo's database."}) else: enabledconf = dict() enabledconf["vt"] = settings.VTDL_ENABLED enabledconf["kernel"] = settings.OPT_ZER0M0N enabledconf["memory"] = processing.memory.get("enabled") enabledconf["procmemory"] = processing.procmemory.get("enabled") enabledconf["dlnexec"] = settings.DLNEXEC enabledconf["url_analysis"] = settings.URL_ANALYSIS enabledconf["tags"] = False enabledconf[ "dist_master_storage_only"] = repconf.distributed.master_storage_only enabledconf["linux_on_gui"] = web_conf.linux.enabled all_tags = load_vms_tags() if all_tags: enabledconf["tags"] = True if not enabledconf["tags"]: # load multi machinery tags: # Get enabled machinery machinery = cfg.cuckoo.get("machinery") if machinery == "multi": for mmachinery in Config(machinery).multi.get( "machinery").split(","): vms = [ x.strip() for x in getattr(Config( mmachinery), mmachinery).get("machines").split(",") ] if any([ "tags" in list(getattr(Config(mmachinery), vmtag).keys()) for vmtag in vms ]): enabledconf["tags"] = True break else: # Get VM names for machinery config elements vms = [ x.strip() for x in getattr(Config( machinery), machinery).get("machines").split(",") ] # Check 
each VM config element for tags if any([ "tags" in list(getattr(Config(machinery), vmtag).keys()) for vmtag in vms ]): enabledconf["tags"] = True packages, machines = get_form_data("windows") socks5s = _load_socks5_operational() socks5s_random = "" if socks5s: socks5s_random = random.choice(list(socks5s.values())).get( "description", False) return render( request, "submission/index.html", { "packages": sorted(packages), "machines": machines, "vpns": list(vpns.values()), "socks5s": list(socks5s.values()), "socks5s_random": socks5s_random, "route": routing.routing.route, "internet": routing.routing.internet, "inetsim": routing.inetsim.enabled, "tor": routing.tor.enabled, "config": enabledconf, "resubmit": resubmit_hash, "tags": sorted(list(set(all_tags))) })
def test_validate_referrer_no_url(self):
    # A missing referrer URL must come back as None.
    result = utils.validate_referrer(url=None)
    assert result is None
def test_validate_referrer_bad_url(self):
    # Non-http(s) schemes such as irc:// are rejected and yield None.
    bad_url = "irc://foo.example.com:1337"
    assert utils.validate_referrer(url=bad_url) is None
def test_validate_referrer(self):
    # A well-formed http URL passes through validation unchanged.
    good_url = "http://foo.example.com:1337/bar"
    assert utils.validate_referrer(url=good_url) == good_url
def index(request):
    """Render the sample-submission form (GET) or create analysis tasks (POST).

    On POST, accepts one of several submission sources — uploaded samples,
    quarantine files, PCAPs, a URL, or VirusTotal hashes — expands the
    requested gateway/machine selections, and queues one task per
    (gateway, machine, sample) combination via the Database helpers.
    Returns a rendered ``submission/complete.html`` on success or
    ``error.html`` describing the failure.

    On GET, builds the form context (enabled features, packages, machines)
    and renders ``submission/index.html``.

    NOTE(review): ``Database``, ``Config``, ``store_temp_file``,
    ``update_options`` etc. are project helpers defined elsewhere in this
    codebase; their exact contracts are assumed from usage here.
    """
    if request.method == "POST":
        # --- Collect and normalize the raw form fields. ---
        package = request.POST.get("package", "")
        # Cap the analysis timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)

        task_gateways = []
        # Matches a dotted-quad IPv4 address (each octet 0-255).
        ipaddy_re = re.compile(r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$")

        # --- Fold the checkbox-style flags into the comma-separated
        # ``options`` string consumed by the analyzer. ---
        if referrer:
            if options:
                options += ","
            options += "referrer=%s" % (referrer)

        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"

        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"

        if request.POST.get("norefer"):
            if options:
                options += ","
            options += "norefer=1"

        # Snapshot of the options before per-gateway rewriting below.
        orig_options = options

        # --- Expand the gateway selection into concrete gateway IPs. ---
        if gateway and gateway.lower() == "all":
            # "all": every configured gateway that looks like an IPv4 address.
            for e in settings.GATEWAYS:
                if ipaddy_re.match(settings.GATEWAYS[e]):
                    task_gateways.append(settings.GATEWAYS[e])
        elif gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                # The entry is a group (comma-separated list of gateway keys).
                if request.POST.get("all_gw_in_group"):
                    # Submit once through every gateway in the group.
                    tgateway = settings.GATEWAYS[gateway].split(",")
                    for e in tgateway:
                        task_gateways.append(settings.GATEWAYS[e])
                else:
                    # Otherwise pick one member of the group at random.
                    tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
                    task_gateways.append(settings.GATEWAYS[tgateway])
            else:
                task_gateways.append(settings.GATEWAYS[gateway])

        if not task_gateways:
            # To reduce to the default case
            task_gateways = [None]

        db = Database()
        task_ids = []
        task_machines = []

        # --- Expand the machine selection. ---
        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            # NOTE(review): an empty ``machine`` string is appended as-is here,
            # presumably meaning "first available" downstream — confirm.
            task_machines.append(machine)

        # --- Dispatch on the submission source. ---
        if "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {"error": "You uploaded a file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."})

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                # One task per (gateway, machine) combination.
                for gw in task_gateways:
                    options = update_options(gw, orig_options)
                    for entry in task_machines:
                        try:
                            task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                            task_ids.extend(task_ids_new)
                        except CuckooDemuxError as err:
                            return render(request, "error.html", {"error": err})
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {"error": "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."})

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)

                # Extract the original sample from the AV quarantine container.
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except:
                    # Best-effort cleanup of the container; failure is ignored.
                    pass

                if not path:
                    return render(request, "error.html", {"error": "You uploaded an unsupported quarantine file."})

                for gw in task_gateways:
                    options = update_options(gw, orig_options)
                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                        task_ids.extend(task_ids_new)
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue
                    return render(request, "error.html", {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html", {"error": "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."})

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                # Fiddler .saz archives are converted to PCAP before queueing.
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except:
                            # Best-effort cleanup; failure to delete is ignored.
                            pass
                        path = saz
                    else:
                        return render(request, "error.html", {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority)
                task_ids.append(task_id)
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html", {"error": "You specified an invalid URL!"})

            # Un-defang common "hxxp"/"[.]" obfuscations before submission.
            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            for gw in task_gateways:
                options = update_options(gw, orig_options)
                for entry in task_machines:
                    task_id = db.add_url(url=url, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                    if task_id:
                        task_ids.append(task_id)
        elif settings.VTDL_ENABLED and "vtdl" in request.POST:
            # Fetch one or more samples from VirusTotal by hash.
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(request, "error.html", {"error": "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"})
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl', dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.split(",")
                else:
                    hashlist.append(vtdl)
                # True once at least one hash downloads successfully.
                onesuccess = False

                for h in hashlist:
                    filename = base_dir + "/" + h
                    # Private API keys use the v2 endpoint; otherwise fall
                    # back to the Intelligence download endpoint.
                    if settings.VTDL_PRIV_KEY:
                        url = 'https://www.virustotal.com/vtapi/v2/file/download'
                        params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
                    else:
                        url = 'https://www.virustotal.com/intelligence/download/'
                        params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}

                    try:
                        r = requests.get(url, params=params, verify=True)
                    except requests.exceptions.RequestException as e:
                        return render(request, "error.html", {"error": "Error completing connection to VirusTotal: {0}".format(e)})
                    if r.status_code == 200:
                        try:
                            f = open(filename, 'wb')
                            f.write(r.content)
                            f.close()
                        except:
                            return render(request, "error.html", {"error": "Error writing VirusTotal download file to temporary path"})

                        onesuccess = True

                        for gw in task_gateways:
                            options = update_options(gw, orig_options)
                            for entry in task_machines:
                                task_ids_new = db.demux_sample_and_add_to_db(file_path=filename, package=package, timeout=timeout, options=options, priority=priority, machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                                task_ids.extend(task_ids_new)
                    elif r.status_code == 403:
                        return render(request, "error.html", {"error": "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"})

                if not onesuccess:
                    return render(request, "error.html", {"error": "Provided hash not found on VirusTotal"})

        # --- Report the outcome of the submission. ---
        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render(request, "submission/complete.html",
                          {"tasks": task_ids,
                           "tasks_count": tasks_count})
        else:
            return render(request, "error.html", {"error": "Error adding task to Cuckoo's database."})
    else:
        # GET: build the submission-form context.
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get("enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True

        # Analysis packages are discovered from the analyzer's packages dir.
        files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue
            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render(request, "submission/index.html",
                      {"packages": sorted(packages),
                       "machines": machines,
                       "gateways": settings.GATEWAYS,
                       "config": enabledconf})
def parse_request_arguments(request, keyword="POST"):
    """Extract and normalize all common submission parameters from a request.

    Django views read from ``request.POST`` while the REST API reads from
    ``request.data``; ``keyword`` names the attribute to pull fields from.

    Args:
        request: the incoming HTTP request object.
        keyword: attribute of ``request`` that holds the submitted fields
            ("POST" for Django forms, "data" for the API). Defaults to "POST".

    Returns:
        A tuple of the parsed fields:
        (static, package, timeout, priority, options, machine, platform,
         tags, custom, memory, clock, enforce_timeout, shrike_url,
         shrike_msg, shrike_sid, shrike_refer, unique, referrer, tlp,
         tags_tasks, route, cape)
    """
    # Django uses request.POST and API uses request.data.
    # Resolve the field container once instead of on every lookup.
    params = getattr(request, keyword)

    static = params.get("static", "")
    referrer = validate_referrer(params.get("referrer"))
    package = params.get("package", "")
    timeout = force_int(params.get("timeout"))
    priority = force_int(params.get("priority"))
    options = params.get("options", "")
    machine = params.get("machine", "")
    platform = params.get("platform", "")
    tags_tasks = params.get("tags_tasks")
    tags = params.get("tags")
    custom = params.get("custom", "")
    memory = force_bool(params.get("memory", False))

    # Default the task clock to "now"; also reject empty or epoch-era
    # ("1970") values, which indicate a missing/bogus client clock.
    clock = params.get("clock", datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
    if not clock:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    if "1970" in clock:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")

    enforce_timeout = force_bool(params.get("enforce_timeout", False))
    shrike_url = params.get("shrike_url")
    shrike_msg = params.get("shrike_msg")
    shrike_sid = params.get("shrike_sid")
    shrike_refer = params.get("shrike_refer")
    unique = force_bool(params.get("unique", False))
    tlp = params.get("tlp")
    lin_options = params.get("lin_options", "")
    route = params.get("route")
    cape = params.get("cape", "")

    # The "process_dump" checkbox enables both process-memory and
    # process dumps in the analyzer options string.
    if params.get("process_dump"):
        if options:
            options += ","
        options += "procmemdump=1,procdump=1"

    if referrer:
        if options:
            options += ","
        options += "referrer=%s" % (referrer)

    # Linux options
    # NOTE: lin_options, when supplied, replaces the whole options string.
    if lin_options:
        options = lin_options

    return (
        static,
        package,
        timeout,
        priority,
        options,
        machine,
        platform,
        tags,
        custom,
        memory,
        clock,
        enforce_timeout,
        shrike_url,
        shrike_msg,
        shrike_sid,
        shrike_refer,
        unique,
        referrer,
        tlp,
        tags_tasks,
        route,
        cape,
    )