def run(self):
    """Run analysis on the collected process memory dumps.
    @return: structured results, one dict per dump file.
    """
    self.key = "procmemory"
    results = []

    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)

            # Let's hope the file is not too big: the whole dump is read
            # into memory. Use a context manager so the handle is always
            # closed (the original `open(...).read()` leaked it).
            with open(dmp_path, "rb") as f:
                buf = f.read()

            # Collect unique, non-whitelisted URLs seen in the dump.
            urls = set()
            for url in re.findall(HTTP_REGEX, buf):
                if not is_whitelisted_domain(url[1]):
                    urls.add("".join(url))

            proc = dict(
                file=dmp_path,
                # Dump files are named "<pid>-...", so the PID is the
                # leading dash-separated token of the basename.
                pid=int(os.path.basename(dmp_path).split("-")[0]),
                yara=dmp_file.get_yara(
                    os.path.join(CUCKOO_ROOT, "data", "yara",
                                 "index_memory.yar")),
                urls=list(urls),
            )

            results.append(proc)

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    # Nothing to do when no process memory dumps were collected.
    if not os.path.exists(self.pmemory_path):
        return results

    for entry in os.listdir(self.pmemory_path):
        dump_path = os.path.join(self.pmemory_path, entry)
        dump_file = File(dump_path)

        # The dump basename (without extension) encodes the process ID.
        pid = int(os.path.splitext(os.path.basename(dump_path))[0])

        # Enrich with name/path from the behavioral log, if available.
        name, path = "", ""
        if "behavior" in self.results and "processes" in self.results["behavior"]:
            for proc_info in self.results["behavior"]["processes"]:
                if proc_info["process_id"] == pid:
                    name = proc_info["process_name"]
                    path = proc_info["module_path"]

        results.append(dict(
            file=dump_path,
            pid=pid,
            name=name,
            path=path,
            yara=dump_file.get_yara(
                os.path.join(CUCKOO_ROOT, "data", "yara",
                             "index_memory.yar")),
            address_space=self.parse_dump(dump_path),
        ))

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    if os.path.exists(self.pmemory_path):
        for filename in os.listdir(self.pmemory_path):
            dump_path = os.path.join(self.pmemory_path, filename)
            dump_file = File(dump_path)

            # Dump basename (sans extension) is the process ID.
            pid = int(os.path.splitext(os.path.basename(dump_path))[0])

            # Look up the matching process in the behavior log, if any.
            process_name = ""
            process_path = ""
            behavior = self.results["behavior"] if "behavior" in self.results else {}
            if "processes" in behavior:
                for entry in behavior["processes"]:
                    if entry["process_id"] == pid:
                        process_name = entry["process_name"]
                        process_path = entry["module_path"]

            results.append({
                "file": dump_path,
                "pid": pid,
                "name": process_name,
                "path": process_path,
                "yara": dump_file.get_yara(
                    os.path.join(CUCKOO_ROOT, "data", "yara",
                                 "index_memory.yar")),
                "address_space": self.parse_dump(dump_path),
            })

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    if not os.path.exists(self.pmemory_path):
        return results

    for filename in os.listdir(self.pmemory_path):
        # Only actual memory dump files are of interest.
        if not filename.endswith(".dmp"):
            continue

        dump_path = os.path.join(self.pmemory_path, filename)
        dump_file = File(dump_path)
        basename = os.path.basename(dump_path)

        # Newer dumps are named "<pid>-<num>.dmp", older ones "<pid>.dmp".
        separator = "-" if "-" in basename else "."
        pid = int(basename.split(separator)[0])

        proc = dict(
            file=dump_path,
            pid=pid,
            yara=dump_file.get_yara("memory"),
            urls=list(self.extract_urls(dump_path)),
            regions=list(self.read_dump(dump_path)),
        )

        # Optionally emit an IDA Python helper script for this dump.
        if self.options.get("idapro"):
            self.create_idapy(proc)

        results.append(proc)

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    if not os.path.exists(self.pmemory_path):
        return results

    for entry in os.listdir(self.pmemory_path):
        if not entry.endswith(".dmp"):
            continue

        dump_path = os.path.join(self.pmemory_path, entry)
        dump_file = File(dump_path)
        basename = os.path.basename(dump_path)

        # "<pid>-<num>.dmp" (newer) or "<pid>.dmp" (older) naming scheme.
        if "-" in basename:
            pid = int(basename.split("-")[0])
        else:
            pid = int(basename.split(".")[0])

        proc = {
            "file": dump_path,
            "pid": pid,
            "yara": dump_file.get_yara("memory"),
            "urls": list(dump_file.get_urls()),
            "regions": list(self.read_dump(dump_path)),
        }

        # Optionally emit an IDA Python helper script for this dump.
        if self.options.get("idapro"):
            self.create_idapy(proc)

        results.append(proc)

    return results
def run(self):
    """Run analysis on the collected process memory dumps.
    @return: structured results, one dict per dump file.
    """
    self.key = "procmemory"
    results = []

    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)

            # Let's hope the file is not too big. Read via a context
            # manager so the handle is always released (the original
            # `open(...).read()` leaked it).
            with open(dmp_path, "rb") as f:
                buf = f.read()

            # Unique, non-whitelisted URLs observed in the dump.
            urls = set()
            for url in re.findall(HTTP_REGEX, buf):
                if not is_whitelisted_domain(url[1]):
                    urls.add("".join(url))

            proc = dict(
                file=dmp_path,
                # NOTE(review): pid here stays a *string* (basename without
                # extension), unlike sibling variants that cast to int —
                # preserved for backward compatibility with consumers.
                pid=os.path.splitext(os.path.basename(dmp_path))[0],
                yara=dmp_file.get_yara(
                    os.path.join(CUCKOO_ROOT, "data", "yara",
                                 "index_memory.yar")),
                urls=list(urls),
            )

            results.append(proc)

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    # PE image extraction needs the optional pefile dependency.
    if self.options.get("extract_img") and not HAVE_PEFILE:
        log.warning(
            "In order to extract PE files from memory dumps it is "
            "required to have pefile installed (`pip install pefile`).")

    if os.path.exists(self.pmemory_path):
        for filename in os.listdir(self.pmemory_path):
            if not filename.endswith(".dmp"):
                continue

            dump_path = os.path.join(self.pmemory_path, filename)
            dump_file = File(dump_path)

            # Dump filenames look like "<pid>-<num>.dmp".
            pid, num = map(int, re.findall("(\\d+)", filename))

            proc = dict(
                file=dump_path,
                pid=pid,
                num=num,
                yara=dump_file.get_yara("memory"),
                urls=list(dump_file.get_urls()),
                regions=list(self.read_dump(dump_path)),
            )

            if self.options.get("idapro"):
                self.create_idapy(proc)

            if self.options.get("extract_img") and HAVE_PEFILE:
                proc["extracted"] = list(self.dump_images(proc))

            if self.options.get("dump_delete"):
                try:
                    os.remove(dump_path)
                except OSError:
                    log.error(
                        "Unable to delete memory dump file at path \"%s\"",
                        dump_path)

            results.append(proc)

    # Stable ordering: by process ID, then dump sequence number.
    results.sort(key=lambda item: (item["pid"], item["num"]))
    return results
def run(self):
    """Run analysis: match Yara signatures against each memory dump.
    @return: structured results, one dict per dump file.
    """
    self.key = "procmemory"
    results = []

    # Guard against analyses that produced no process memory dumps at
    # all; the original called os.listdir() unconditionally, raising
    # OSError on a missing directory (sibling variants all guard).
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)

            proc = dict(
                yara=dmp_file.get_yara(
                    os.path.join(CUCKOO_ROOT, "data", "yara",
                                 "index_memory.yar")))

            results.append(proc)

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    if self.options.get("extract_img") and not HAVE_PEFILE:
        # Extraction of PE images requires the optional pefile package.
        log.warning(
            "In order to extract PE files from memory dumps it is "
            "required to have pefile installed (`pip install pefile`)."
        )

    if os.path.exists(self.pmemory_path):
        for entry in os.listdir(self.pmemory_path):
            if not entry.endswith(".dmp"):
                continue

            dump_path = os.path.join(self.pmemory_path, entry)
            dump_file = File(dump_path)

            # Filenames follow the "<pid>-<num>.dmp" convention.
            pid, num = map(int, re.findall("(\\d+)", entry))

            proc = {
                "file": dump_path,
                "pid": pid,
                "num": num,
                "yara": dump_file.get_yara("memory"),
                "urls": list(dump_file.get_urls()),
                "regions": list(self.read_dump(dump_path)),
            }

            if self.options.get("idapro"):
                self.create_idapy(proc)

            if self.options.get("extract_img") and HAVE_PEFILE:
                proc["extracted"] = list(self.dump_images(proc))

            if self.options.get("dump_delete"):
                try:
                    os.remove(dump_path)
                except OSError:
                    log.error("Unable to delete memory dump file at path \"%s\"", dump_path)

            results.append(proc)

    # Deterministic ordering by (pid, dump number).
    results.sort(key=lambda proc: (proc["pid"], proc["num"]))
    return results
def run(self):
    """Run analysis: match Yara signatures against each memory dump.
    @return: structured results, one dict per dump file.
    """
    self.key = "procmemory"
    results = []

    # The original listed the directory unconditionally and crashed with
    # OSError when no process memory dumps exist; guard like the other
    # processing modules do.
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)

            proc = dict(
                yara=dmp_file.get_yara(
                    os.path.join(CUCKOO_ROOT, "data", "yara",
                                 "index_memory.yar"))
            )

            results.append(proc)

    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    if not os.path.exists(self.pmemory_path):
        return results

    for entry in os.listdir(self.pmemory_path):
        if not entry.endswith(".dmp"):
            continue

        dump_path = os.path.join(self.pmemory_path, entry)
        dump_file = File(dump_path)
        dump_name = os.path.basename(dump_path)

        # The first 2-5 digit run in the filename is the process ID.
        pid = int(re.findall("(\\d{2,5})", dump_name)[0])

        proc = dict(
            file=dump_path,
            pid=pid,
            yara=dump_file.get_yara("memory"),
            urls=list(dump_file.get_urls()),
            regions=list(self.read_dump(dump_path)),
        )

        if self.options.get("idapro"):
            self.create_idapy(proc)

        if self.options.get("dump_delete"):
            try:
                os.remove(dump_path)
            except OSError:
                log.error(
                    "Unable to delete memory dump file at path \"%s\"",
                    dump_path)

        results.append(proc)

    return results
def run(self):
    """Run analysis on process memory dumps.
    @return: structured results, one dict per ``.dmp`` file.
    """
    self.key = "procmemory"
    results = []

    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            if not dmp.endswith(".dmp"):
                continue

            dump_path = os.path.join(self.pmemory_path, dmp)
            dump_file = File(dump_path)
            dump_name = os.path.basename(dump_path)

            # Use a raw string for the pattern: "\d" inside a plain string
            # is an invalid escape sequence (DeprecationWarning since
            # Python 3.6, slated to become a SyntaxError). Runtime value
            # is unchanged.
            pid = int(re.findall(r"(\d{2,5})", dump_name)[0])

            proc = dict(
                file=dump_path,
                pid=pid,
                yara=dump_file.get_yara("memory"),
                urls=list(dump_file.get_urls()),
                regions=list(self.read_dump(dump_path)),
            )

            if self.options.get("idapro"):
                self.create_idapy(proc)

            if self.options.get("dump_delete"):
                try:
                    os.remove(dump_path)
                except OSError:
                    log.error("Unable to delete memory dump file at path \"%s\"", dump_path)

            results.append(proc)

    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    For each non-empty ``.dmp`` file: runs the "memory" and "CAPE" Yara
    categories, pretty-prints the address space, maps CAPE hit addresses
    to memory blocks, optionally extracts printable/wide strings, and
    records the first named CAPE hit as a detection.
    @return: structured results, one dict per processed dump.
    """
    self.key = "procmemory"
    results = []
    do_strings = self.options.get("strings", False)
    nulltermonly = self.options.get("nullterminated_only", True)
    # Kept as bytes because it is spliced into bytes regex patterns below.
    minchars = str(self.options.get("minchars", 5)).encode("utf-8")
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            # if we're re-processing this task, this means if zips are enabled, we won't do any reprocessing on the
            # process dumps (only matters for now for Yara)
            if not dmp.endswith(".dmp"):
                continue
            dmp_path = os.path.join(self.pmemory_path, dmp)
            # Skip empty dump files.
            if os.path.getsize(dmp_path) == 0:
                continue
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump files are named "<pid>.dmp".
            process_id = int(
                os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            for process in self.results.get("behavior", {}).get(
                    "processes", []) or []:
                if process_id == process["process_id"]:
                    process_name = process["process_name"]
                    process_path = process["module_path"]
            procdump = ProcDump(dmp_path, pretty=True)
            proc = dict(
                file=dmp_path,
                pid=process_id,
                name=process_name,
                path=process_path,
                yara=dmp_file.get_yara(category="memory"),
                cape_yara=dmp_file.get_yara(category="CAPE"),
                address_space=procdump.pretty_print(),
            )
            # Map each CAPE Yara hit address to the memory block that
            # contains it (skipping addresses with no matching block).
            for hit in proc["cape_yara"]:
                hit["memblocks"] = dict()
                for item in hit["addresses"]:
                    memblock = self.get_yara_memblock(
                        proc["address_space"], hit["addresses"][item])
                    if memblock:
                        hit["memblocks"][item] = memblock
            # if self.options.get("extract_pe", False)
            extracted_pes = self.get_procmemory_pe(proc)
            # Without re2, unbounded repetition is too slow; cap run length.
            endlimit = b""
            if not HAVE_RE2:
                endlimit = b"8192"
            if do_strings:
                if nulltermonly:
                    # Printable-ASCII / UTF-16LE runs terminated by NUL.
                    apat = b"([\x20-\x7e]{" + minchars + b"," + endlimit + b"})\x00"
                    upat = b"((?:[\x20-\x7e][\x00]){" + minchars + b"," + endlimit + b"})\x00\x00"
                else:
                    apat = b"[\x20-\x7e]{" + minchars + b"," + endlimit + b"}"
                    upat = b"(?:[\x20-\x7e][\x00]){" + minchars + b"," + endlimit + b"}"
                matchdict = procdump.search(apat, all=True)
                strings = matchdict["matches"]
                matchdict = procdump.search(upat, all=True)
                ustrings = matchdict["matches"]
                for ws in ustrings:
                    # Normalize wide strings to UTF-8 bytes.
                    strings.append(ws.decode("utf-16le").encode("utf-8"))
                # Persist extracted strings next to the dump file.
                proc["strings_path"] = dmp_path + ".strings"
                proc["extracted_pe"] = extracted_pes
                f = open(proc["strings_path"], "wb")
                f.write(b"\n".join(strings))
                f.close()
            procdump.close()
            results.append(proc)
            # Record the first named CAPE Yara hit as the detection name
            # (only if no detection has been recorded yet).
            if "cape_yara" in proc:
                cape_name = ""
                for hit in proc["cape_yara"]:
                    if "name" in hit:
                        if not cape_name:
                            cape_name = hit["name"]
                if cape_name:
                    if "detections" not in self.results:
                        self.results["detections"] = cape_name
    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    Runs Yara over each ``.dmp`` file, optionally extracts printable and
    wide strings to a ``.strings`` sidecar file, and deduplicates the
    repetitive config blocks of known families (Dyre, DarkComet).
    @return: structured results, one dict per processed dump.
    """
    self.key = "procmemory"
    results = []
    do_strings = self.options.get("strings", False)
    nulltermonly = self.options.get("nullterminated_only", True)
    minchars = self.options.get("minchars", 5)
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            # if we're re-processing this task, this means if zips are enabled, we won't do any reprocessing on the
            # process dumps (only matters for now for Yara)
            if not dmp.endswith(".dmp"):
                continue
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump files are named "<pid>.dmp".
            process_id = int(os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            if "behavior" in self.results and "processes" in self.results["behavior"]:
                for process in self.results["behavior"]["processes"]:
                    if process_id == process["process_id"]:
                        process_name = process["process_name"]
                        process_path = process["module_path"]
            procdump = ProcDump(dmp_path, pretty=True)
            proc = dict(
                file=dmp_path,
                pid=process_id,
                name=process_name,
                path=process_path,
                yara=dmp_file.get_yara(os.path.join(CUCKOO_ROOT, "data", "yara", "index_memory.yar")),
                address_space=procdump.pretty_print(),
            )
            # Without re2, unbounded repetition is too slow; cap run length.
            endlimit = ""
            if not HAVE_RE2:
                endlimit = "8192"
            if do_strings:
                if nulltermonly:
                    # Printable-ASCII / UTF-16LE runs terminated by NUL.
                    apat = "([\x20-\x7e]{" + str(minchars) + "," + endlimit + "})\x00"
                    upat = "((?:[\x20-\x7e][\x00]){" + str(minchars) + "," + endlimit + "})\x00\x00"
                else:
                    apat = "[\x20-\x7e]{" + str(minchars) + "," + endlimit + "}"
                    upat = "(?:[\x20-\x7e][\x00]){" + str(minchars) + "," + endlimit + "}"
                matchdict = procdump.search(apat, all=True)
                strings = matchdict["matches"]
                matchdict = procdump.search(upat, all=True)
                ustrings = matchdict["matches"]
                for ws in ustrings:
                    strings.append(str(ws.decode("utf-16le")))
                # Persist the extracted strings next to the dump file.
                proc["strings_path"] = dmp_path + ".strings"
                f = open(proc["strings_path"], "w")
                f.write("\n".join(strings))
                f.close()
            procdump.close()
            # Deduplicate configs
            if proc["yara"]:
                for match in proc["yara"]:
                    # Dyre
                    if match["name"] == "DyreCfgInjectsList":
                        # Collapse repeated <litem>...</litem> blocks into
                        # a unique set.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<litem>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</litem>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique inject elements)" % len(output)
                    elif match["name"] == "DyreCfgRedirectList":
                        # Collapse repeated <rpcgroup>...</rpcgroup> blocks.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<rpcgroup>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</rpcgroup>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique redirect elements)" % len(output)
                    # DarkComet
                    elif match["name"] == "DarkCometConfig":
                        # Collapse repeated #BEGIN/#EOF DARKCOMET sections.
                        output = list()
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("#BEGIN DARKCOMET"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("#EOF DARKCOMET"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
            results.append(proc)
    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    Runs Yara over each ``.dmp`` file, optionally extracts printable and
    wide strings, deduplicates known family configs, and optionally
    compresses the strings file / dump into zip archives (controlled by
    the ``strings``, ``zipstrings`` and ``zipdump`` options).
    @return: structured results, one dict per processed dump.
    @raise CuckooProcessingError: on I/O or zip failures.
    """
    self.key = "procmemory"
    results = []
    zipdump = self.options.get("zipdump", False)
    zipstrings = self.options.get("zipstrings", False)
    do_strings = self.options.get("strings", False)
    nulltermonly = self.options.get("nullterminated_only", True)
    minchars = self.options.get("minchars", 5)
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            # if we're re-processing this task, this means if zips are enabled, we won't do any reprocessing on the
            # process dumps (only matters for now for Yara)
            if not dmp.endswith(".dmp"):
                continue
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump files are named "<pid>.dmp".
            process_id = int(os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            if "behavior" in self.results and "processes" in self.results["behavior"]:
                for process in self.results["behavior"]["processes"]:
                    if process_id == process["process_id"]:
                        process_name = process["process_name"]
                        process_path = process["module_path"]
            proc = dict(
                file=dmp_path,
                pid=process_id,
                name=process_name,
                path=process_path,
                yara=dmp_file.get_yara(os.path.join(CUCKOO_ROOT, "data", "yara", "index_memory.yar")),
                address_space=self.parse_dump(dmp_path),
                zipdump=zipdump,
                zipstrings=zipstrings,
            )
            if do_strings:
                try:
                    # Context manager so the handle is always closed (the
                    # original open(...).read() leaked it).
                    with open(dmp_path, "r") as dump:
                        data = dump.read()
                except (IOError, OSError) as e:
                    raise CuckooProcessingError("Error opening file %s" % e)
                # ASCII pattern is identical in both modes; only the wide
                # (UTF-16LE) pattern differs in NUL termination.
                apat = "([\x20-\x7e]{" + str(minchars) + ",})\x00"
                strings = re.findall(apat, data)
                if nulltermonly:
                    upat = "((?:[\x20-\x7e][\x00]){" + str(minchars) + ",})\x00\x00"
                else:
                    upat = "(?:[\x20-\x7e][\x00]){" + str(minchars) + ",}"
                strings += [str(ws.decode("utf-16le")) for ws in re.findall(upat, data)]
                with open(dmp_path + ".strings", "w") as f:
                    f.write("\n".join(strings))
                proc["strings_path"] = dmp_path + ".strings"
                zipstrings = self.options.get("zipstrings", False)
                if zipstrings:
                    try:
                        f = zipfile.ZipFile("%s.zip" % (proc["strings_path"]), "w")
                        f.write(proc["strings_path"], os.path.basename(proc["strings_path"]), zipfile.ZIP_DEFLATED)
                        f.close()
                        os.remove(proc["strings_path"])
                        proc["strings_path"] = "%s.zip" % (proc["strings_path"])
                    # BUGFIX: the original bare "except:" re-raised with an
                    # undefined name "e", turning any zip failure into a
                    # NameError; bind the actual exception instead.
                    except Exception as e:
                        raise CuckooProcessingError("Error creating Process Memory Strings Zip File %s" % e)
            # Deduplicate configs
            if proc["yara"]:
                for match in proc["yara"]:
                    # Dyre
                    if match["name"] == "DyreCfgInjectsList":
                        # Collapse repeated <litem> blocks to unique entries.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<litem>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</litem>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique inject elements)" % len(output)
                    elif match["name"] == "DyreCfgRedirectList":
                        # Collapse repeated <rpcgroup> blocks to unique entries.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<rpcgroup>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</rpcgroup>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique redirect elements)" % len(output)
                    # DarkComet
                    elif match["name"] == "DarkCometConfig":
                        # Collapse repeated #BEGIN/#EOF DARKCOMET sections.
                        output = list()
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("#BEGIN DARKCOMET"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("#EOF DARKCOMET"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
            if zipdump:
                try:
                    f = zipfile.ZipFile("%s.zip" % (dmp_path), "w")
                    f.write(dmp_path, os.path.basename(dmp_path), zipfile.ZIP_DEFLATED)
                    f.close()
                    os.remove(dmp_path)
                    proc["file"] = "%s.zip" % (dmp_path)
                # BUGFIX: same undefined-"e" bare except as above.
                except Exception as e:
                    raise CuckooProcessingError("Error creating Process Memory Zip File %s" % e)
            results.append(proc)
    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    Runs Yara over each dump, parses its address space, and deduplicates
    the repetitive config blocks of known families (Dyre, DarkComet).
    @return: structured results, one dict per dump file.
    """
    self.key = "procmemory"
    results = []
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump basename (without extension) encodes the process ID.
            process_id = int(os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            if "behavior" in self.results and "processes" in self.results["behavior"]:
                for process in self.results["behavior"]["processes"]:
                    if process_id == process["process_id"]:
                        process_name = process["process_name"]
                        process_path = process["module_path"]
            proc = dict(
                file=dmp_path,
                pid=process_id,
                name=process_name,
                path=process_path,
                yara=dmp_file.get_yara(os.path.join(CUCKOO_ROOT, "data", "yara", "index_memory.yar")),
                address_space=self.parse_dump(dmp_path)
            )
            # Deduplicate configs
            if proc["yara"]:
                for match in proc["yara"]:
                    # Dyre
                    if match["name"] == "DyreCfgInjectsList":
                        # Collapse repeated <litem>...</litem> blocks into
                        # a unique set.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<litem>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</litem>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique inject elements)" % len(output)
                    elif match["name"] == "DyreCfgRedirectList":
                        # Collapse repeated <rpcgroup>...</rpcgroup> blocks.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<rpcgroup>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</rpcgroup>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique redirect elements)" % len(output)
                    # DarkComet
                    elif match["name"] == "DarkCometConfig":
                        # Collapse repeated #BEGIN/#EOF DARKCOMET sections.
                        output = list()
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("#BEGIN DARKCOMET"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("#EOF DARKCOMET"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
            results.append(proc)
    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    Runs Yara over each ``.dmp`` file, optionally extracts printable and
    wide strings to a ``.strings`` sidecar file, and deduplicates known
    family config blocks (Dyre, DarkComet).
    @return: structured results, one dict per processed dump.
    """
    self.key = "procmemory"
    results = []
    do_strings = self.options.get("strings", False)
    nulltermonly = self.options.get("nullterminated_only", True)
    minchars = self.options.get("minchars", 5)
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            # if we're re-processing this task, this means if zips are enabled, we won't do any reprocessing on the
            # process dumps (only matters for now for Yara)
            if not dmp.endswith(".dmp"):
                continue
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump files are named "<pid>.dmp".
            process_id = int(
                os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            if "behavior" in self.results and "processes" in self.results[
                    "behavior"]:
                for process in self.results["behavior"]["processes"]:
                    if process_id == process["process_id"]:
                        process_name = process["process_name"]
                        process_path = process["module_path"]
            procdump = ProcDump(dmp_path, pretty=True)
            proc = dict(
                file=dmp_path,
                pid=process_id,
                name=process_name,
                path=process_path,
                yara=dmp_file.get_yara(
                    os.path.join(CUCKOO_ROOT, "data", "yara",
                                 "index_memory.yar")),
                address_space=procdump.pretty_print(),
            )
            # Without re2, unbounded repetition is too slow; cap run length.
            endlimit = ""
            if not HAVE_RE2:
                endlimit = "8192"
            if do_strings:
                if nulltermonly:
                    # Printable-ASCII / UTF-16LE runs terminated by NUL.
                    apat = "([\x20-\x7e]{" + str(
                        minchars) + "," + endlimit + "})\x00"
                    upat = "((?:[\x20-\x7e][\x00]){" + str(
                        minchars) + "," + endlimit + "})\x00\x00"
                else:
                    apat = "[\x20-\x7e]{" + str(
                        minchars) + "," + endlimit + "}"
                    upat = "(?:[\x20-\x7e][\x00]){" + str(
                        minchars) + "," + endlimit + "}"
                matchdict = procdump.search(apat, all=True)
                strings = matchdict["matches"]
                matchdict = procdump.search(upat, all=True)
                ustrings = matchdict["matches"]
                for ws in ustrings:
                    strings.append(str(ws.decode("utf-16le")))
                # Persist the extracted strings next to the dump file.
                proc["strings_path"] = dmp_path + ".strings"
                f = open(proc["strings_path"], "w")
                f.write("\n".join(strings))
                f.close()
            procdump.close()
            # Deduplicate configs
            if proc["yara"]:
                for match in proc["yara"]:
                    # Dyre
                    if match["name"] == "DyreCfgInjectsList":
                        # Collapse repeated <litem> blocks to unique entries.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<litem>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</litem>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"][
                            "description"] += " (Observed %d unique inject elements)" % len(
                            output)
                    elif match["name"] == "DyreCfgRedirectList":
                        # Collapse repeated <rpcgroup> blocks.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<rpcgroup>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</rpcgroup>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"][
                            "description"] += " (Observed %d unique redirect elements)" % len(
                            output)
                    # DarkComet
                    elif match["name"] == "DarkCometConfig":
                        # Collapse repeated #BEGIN/#EOF DARKCOMET sections.
                        output = list()
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("#BEGIN DARKCOMET"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("#EOF DARKCOMET"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
            results.append(proc)
    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    Runs Yara over each dump, parses its address space, and deduplicates
    the repetitive config blocks of known families (Dyre, DarkComet).
    @return: structured results, one dict per dump file.
    """
    self.key = "procmemory"
    results = []
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump basename (without extension) encodes the process ID.
            process_id = int(
                os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            if "behavior" in self.results and "processes" in self.results[
                    "behavior"]:
                for process in self.results["behavior"]["processes"]:
                    if process_id == process["process_id"]:
                        process_name = process["process_name"]
                        process_path = process["module_path"]
            proc = dict(file=dmp_path,
                        pid=process_id,
                        name=process_name,
                        path=process_path,
                        yara=dmp_file.get_yara(
                            os.path.join(CUCKOO_ROOT, "data", "yara",
                                         "index_memory.yar")),
                        address_space=self.parse_dump(dmp_path))
            # Deduplicate configs
            if proc["yara"]:
                for match in proc["yara"]:
                    # Dyre
                    if match["name"] == "DyreCfgInjectsList":
                        # Collapse repeated <litem>...</litem> blocks into
                        # a unique set.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<litem>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</litem>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"][
                            "description"] += " (Observed %d unique inject elements)" % len(
                            output)
                    elif match["name"] == "DyreCfgRedirectList":
                        # Collapse repeated <rpcgroup>...</rpcgroup> blocks.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<rpcgroup>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</rpcgroup>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"][
                            "description"] += " (Observed %d unique redirect elements)" % len(
                            output)
                    # DarkComet
                    elif match["name"] == "DarkCometConfig":
                        # Collapse repeated #BEGIN/#EOF DARKCOMET sections.
                        output = list()
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("#BEGIN DARKCOMET"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("#EOF DARKCOMET"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
            results.append(proc)
    return results
def run(self):
    """Run analysis.
    @return: structured results.
    """
    self.key = "procmemory"
    results = []

    do_strings = self.options.get("strings", False)
    nulltermonly = self.options.get("nullterminated_only", True)
    minchars = self.options.get("minchars", 5)

    if not os.path.exists(self.pmemory_path):
        return results

    for entry in os.listdir(self.pmemory_path):
        # if we're re-processing this task, this means if zips are enabled, we won't do any reprocessing on the
        # process dumps (only matters for now for Yara)
        if not entry.endswith(".dmp"):
            continue

        dmp_path = os.path.join(self.pmemory_path, entry)
        dmp_file = File(dmp_path)

        # Dump basename (without extension) encodes the process ID.
        pid = int(os.path.splitext(os.path.basename(dmp_path))[0])

        # Resolve process name/path from the behavior analysis, if any.
        process_name = ""
        process_path = ""
        if "behavior" in self.results and "processes" in self.results["behavior"]:
            for process in self.results["behavior"]["processes"]:
                if process["process_id"] == pid:
                    process_name = process["process_name"]
                    process_path = process["module_path"]

        procdump = ProcDump(dmp_path, pretty=True)
        proc = dict(
            file=dmp_path,
            pid=pid,
            name=process_name,
            path=process_path,
            yara=dmp_file.get_yara(os.path.join(CUCKOO_ROOT, "data", "yara", "index_memory.yar")),
            address_space=procdump.pretty_print(),
        )

        # Without re2, unbounded repetition is too slow; cap run length.
        endlimit = "" if HAVE_RE2 else "8192"

        if do_strings:
            if nulltermonly:
                # Printable-ASCII / UTF-16LE runs terminated by NUL.
                apat = "([\x20-\x7e]{" + str(minchars) + "," + endlimit + "})\x00"
                upat = "((?:[\x20-\x7e][\x00]){" + str(minchars) + "," + endlimit + "})\x00\x00"
            else:
                apat = "[\x20-\x7e]{" + str(minchars) + "," + endlimit + "}"
                upat = "(?:[\x20-\x7e][\x00]){" + str(minchars) + "," + endlimit + "}"

            strings = procdump.search(apat, all=True)["matches"]
            for ws in procdump.search(upat, all=True)["matches"]:
                strings.append(str(ws.decode("utf-16le")))

            # Persist the extracted strings next to the dump file.
            proc["strings_path"] = dmp_path + ".strings"
            out = open(proc["strings_path"], "w")
            out.write("\n".join(strings))
            out.close()

        procdump.close()
        results.append(proc)

    return results
def run(self):
    """Run analysis on the collected process memory dumps.

    Runs Yara over each ``.dmp`` file, optionally extracts printable and
    wide strings, deduplicates known family configs, and optionally zips
    the strings file / dump (``strings``, ``zipstrings``, ``zipdump``
    options).
    @return: structured results, one dict per processed dump.
    @raise CuckooProcessingError: on I/O or zip failures.
    """
    self.key = "procmemory"
    results = []
    zipdump = self.options.get("zipdump", False)
    zipstrings = self.options.get("zipstrings", False)
    do_strings = self.options.get("strings", False)
    nulltermonly = self.options.get("nullterminated_only", True)
    minchars = self.options.get("minchars", 5)
    if os.path.exists(self.pmemory_path):
        for dmp in os.listdir(self.pmemory_path):
            # if we're re-processing this task, this means if zips are enabled, we won't do any reprocessing on the
            # process dumps (only matters for now for Yara)
            if not dmp.endswith(".dmp"):
                continue
            dmp_path = os.path.join(self.pmemory_path, dmp)
            dmp_file = File(dmp_path)
            process_name = ""
            process_path = ""
            # Dump files are named "<pid>.dmp".
            process_id = int(os.path.splitext(os.path.basename(dmp_path))[0])
            # Enrich with process name/path from the behavior log, if any.
            if "behavior" in self.results and "processes" in self.results["behavior"]:
                for process in self.results["behavior"]["processes"]:
                    if process_id == process["process_id"]:
                        process_name = process["process_name"]
                        process_path = process["module_path"]
            proc = dict(
                file=dmp_path,
                pid=process_id,
                name=process_name,
                path=process_path,
                yara=dmp_file.get_yara(os.path.join(CUCKOO_ROOT, "data", "yara", "index_memory.yar")),
                address_space=self.parse_dump(dmp_path),
                zipdump=zipdump,
                zipstrings=zipstrings,
            )
            if do_strings:
                try:
                    # Context manager so the handle is always closed (the
                    # original open(...).read() leaked it).
                    with open(dmp_path, "r") as dump:
                        data = dump.read()
                except (IOError, OSError) as e:
                    raise CuckooProcessingError("Error opening file %s" % e)
                # The ASCII pattern is identical in both modes; only the
                # wide (UTF-16LE) pattern differs in NUL termination.
                apat = "([\x20-\x7e]{" + str(minchars) + ",})\x00"
                strings = re.findall(apat, data)
                if nulltermonly:
                    upat = "((?:[\x20-\x7e][\x00]){" + str(minchars) + ",})\x00\x00"
                else:
                    upat = "(?:[\x20-\x7e][\x00]){" + str(minchars) + ",}"
                strings += [str(ws.decode("utf-16le")) for ws in re.findall(upat, data)]
                with open(dmp_path + ".strings", "w") as f:
                    f.write("\n".join(strings))
                proc["strings_path"] = dmp_path + ".strings"
                zipstrings = self.options.get("zipstrings", False)
                if zipstrings:
                    try:
                        f = zipfile.ZipFile("%s.zip" % (proc["strings_path"]), "w")
                        f.write(proc["strings_path"], os.path.basename(proc["strings_path"]), zipfile.ZIP_DEFLATED)
                        f.close()
                        os.remove(proc["strings_path"])
                        proc["strings_path"] = "%s.zip" % (proc["strings_path"])
                    # BUGFIX: the original bare "except:" re-raised using an
                    # undefined name "e" (NameError masked the real error);
                    # bind the actual exception instead.
                    except Exception as e:
                        raise CuckooProcessingError("Error creating Process Memory Strings Zip File %s" % e)
            # Deduplicate configs
            if proc["yara"]:
                for match in proc["yara"]:
                    # Dyre
                    if match["name"] == "DyreCfgInjectsList":
                        # Collapse repeated <litem> blocks to unique entries.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<litem>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</litem>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique inject elements)" % len(output)
                    elif match["name"] == "DyreCfgRedirectList":
                        # Collapse repeated <rpcgroup> blocks to unique entries.
                        output = list()
                        buf = ""
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("<rpcgroup>"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("</rpcgroup>"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
                        match["meta"]["description"] += " (Observed %d unique redirect elements)" % len(output)
                    # DarkComet
                    elif match["name"] == "DarkCometConfig":
                        # Collapse repeated #BEGIN/#EOF DARKCOMET sections.
                        output = list()
                        recline = False
                        for ystring in match["strings"]:
                            for line in ystring.splitlines():
                                if line.startswith("#BEGIN DARKCOMET"):
                                    buf = ""
                                    recline = True
                                if recline:
                                    buf += line.strip() + "\n"
                                if line.startswith("#EOF DARKCOMET"):
                                    recline = False
                                    if buf not in output:
                                        output.append(buf)
                        match["strings"] = ["".join(output)]
            if zipdump:
                try:
                    f = zipfile.ZipFile("%s.zip" % (dmp_path), "w")
                    f.write(dmp_path, os.path.basename(dmp_path), zipfile.ZIP_DEFLATED)
                    f.close()
                    os.remove(dmp_path)
                    proc["file"] = "%s.zip" % (dmp_path)
                # BUGFIX: same undefined-"e" bare except as above.
                except Exception as e:
                    raise CuckooProcessingError("Error creating Process Memory Zip File %s" % e)
            results.append(proc)
    return results