Example #1
    def _get_versioninfo(self):
        """Get version info.
        @return: list of version info dicts or None.
        """
        if not self.pe:
            return None

        infos = []
        if hasattr(self.pe, "VS_VERSIONINFO"):
            if hasattr(self.pe, "FileInfo"):
                for entry in self.pe.FileInfo:
                    try:
                        if hasattr(entry, "StringTable"):
                            for st_entry in entry.StringTable:
                                for str_entry in st_entry.entries.items():
                                    entry = {}
                                    entry["name"] = convert_to_printable(str_entry[0])
                                    entry["value"] = convert_to_printable(str_entry[1])
                                    if entry["name"] == "Translation" and len(entry["value"]) == 10:
                                        entry["value"] = "0x0" + entry["value"][2:5] + " 0x0" + entry["value"][7:10]
                                    infos.append(entry)
                        elif hasattr(entry, "Var"):
                            for var_entry in entry.Var:
                                if hasattr(var_entry, "entry"):
                                    entry = {}
                                    entry["name"] = convert_to_printable(var_entry.entry.keys()[0])
                                    entry["value"] = convert_to_printable(var_entry.entry.values()[0])
                                    if entry["name"] == "Translation" and len(entry["value"]) == 10:
                                        entry["value"] = "0x0" + entry["value"][2:5] + " 0x0" + entry["value"][7:10]
                                    infos.append(entry)
                    except:
                        continue

        return infos
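
All of the examples on this page call a shared helper named convert_to_printable, whose definition is not shown here. A minimal sketch of its assumed behavior (not the project's verbatim implementation) keeps printable characters and hex-escapes the rest:

    import string

    def convert_to_printable(s):
        # Assumed behavior: pass printable characters through, hex-escape the rest.
        return "".join(c if c in string.printable else "\\x%02x" % ord(c) for c in s)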
Example #2
    def _add_hosts(self, connection):
        """Add IPs to unique list.
        @param connection: connection data
        """
        try:
            if connection["src"] not in self.hosts:
                ip = convert_to_printable(connection["src"])

                # We consider the IP only if it hasn't been seen before.
                if ip not in self.hosts:
                    # If the IP is not a local one, this might be a leftover
                    # packet as described in issue #249.
                    if self._is_private_ip(ip):
                        self.hosts.append(ip)

            if connection["dst"] not in self.hosts:
                ip = convert_to_printable(connection["dst"])

                if ip not in self.hosts:
                    self.hosts.append(ip)

                    # We add external IPs to the list, only the first time
                    # we see them and if they're the destination of the
                    # first packet they appear in.
                    if not self._is_private_ip(ip):
                        self.unique_hosts.append(ip)
        except:
            pass
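
The _add_hosts variants also rely on a _is_private_ip helper that is not shown on this page. A minimal sketch, assuming it simply tests for private/reserved ranges (the actual project may consult its network configuration instead):

    import ipaddress

    def _is_private_ip(self, ip):
        # Assumption: RFC 1918 and other reserved ranges count as private.
        try:
            return ipaddress.ip_address(u"%s" % ip).is_private
        except ValueError:
            return False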
Example #3
 def _get_custom_attrs(self):
     try:
         ret = []
          output = Popen(["/usr/bin/monodis", "--customattr", self.file_path], stdout=PIPE).stdout.read().split("\n")
         for line in output[1:]:
             splitline = line.split(" ")
             typeval = splitline[1].rstrip(":")
             nameval = splitline[6].split("::")[0]
             if "(string)" not in splitline[6]:
                 continue
             rem = " ".join(splitline[7:])
             startidx = rem.find('["')
             if startidx < 0:
                 continue
             endidx = rem.rfind('"]')
             # also ignore empty strings
             if endidx <= 2:
                 continue
             valueval = rem[startidx + 2 : endidx - 2]
             item = dict()
             item["type"] = convert_to_printable(typeval)
             item["name"] = convert_to_printable(nameval)
             item["value"] = convert_to_printable(valueval)
             ret.append(item)
         return ret
     except:
         return None
Example #4
    def _unpack(self, buf):
        """Extract IRC messages from a TCP stream into a list.
        @buf: tcp stream data
        """
        try:
            f = cStringIO.StringIO(buf)
            lines = f.readlines()
        except Exception:
            log.error("Failed reading tcp stream buffer")
            return False

        logirc = False
        for element in lines:
            if re.match("^:", element) is not None:
                command = "([a-zA-Z]+|[0-9]{3})"
                params = "(\x20.+)"
                irc_server_msg = re.findall("(^:[\w+.{}!@|()]+\x20)" + command + params, element)
                if irc_server_msg:
                    self._sc["prefix"] = convert_to_printable(irc_server_msg[0][0].strip())
                    self._sc["command"] = convert_to_printable(irc_server_msg[0][1].strip())
                    self._sc["params"] = convert_to_printable(irc_server_msg[0][2].strip())
                    self._sc["type"] = "server"
                    if logirc:
                        self._messages.append(dict(self._sc))
            else:
                irc_client_msg = re.findall("([a-zA-Z]+\x20)(.+[\x0a\x0d])", element)
                if irc_client_msg and irc_client_msg[0][0].strip() in self.__methods_client:
                    self._cc["command"] = convert_to_printable(irc_client_msg[0][0].strip())
                    if self._cc["command"] in ["NICK", "USER"]:
                        logirc = True
                    self._cc["params"] = convert_to_printable(irc_client_msg[0][1].strip())
                    self._cc["type"] = "client"
                    if logirc:
                        self._messages.append(dict(self._cc))
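
For reference, a hypothetical stream fragment that the regexes above would match (the buffer contents are illustrative, not taken from the source):

    buf = ":irc.example.net 001 bot :Welcome\r\nNICK bot\r\nUSER bot 0 * :x\r\n"
    # The server reply arrives before NICK, while logirc is still False, so
    # only the NICK and USER client commands end up in self._messages.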
Example #5
    def _add_hosts(self, connection):
        """Add IPs to unique list.
        @param connection: connection data
        """
        try:
            # TODO: Perhaps this block should be removed.
            # If there is a packet from a non-local IP address, which hasn't
            # been seen before, it means that the connection wasn't initiated
            # during the time of the current analysis.
            if connection["src"] not in self.hosts:
                ip = convert_to_printable(connection["src"])

                # We consider the IP only if it hasn't been seen before.
                if ip not in self.hosts:
                    # If the IP is not a local one, this might be a leftover
                    # packet as described in issue #249.
                    if self._is_private_ip(ip):
                        self.hosts.append(ip)

            if connection["dst"] not in self.hosts:
                ip = convert_to_printable(connection["dst"])

                if ip not in self.hosts:
                    self.hosts.append(ip)

                    # We add external IPs to the list, only the first time
                    # we see them and if they're the destination of the
                    # first packet they appear in.
                    if not self._is_private_ip(ip):
                        self.unique_hosts.append(ip)
        except:
            pass
Example #6
    def _add_hosts(self, connection):
        """Add IPs to unique list.
        @param connection: connection data
        """
        try:
            if connection["src"] not in self.hosts:
                ip = convert_to_printable(connection["src"])
                # Returning early here would skip the destination check below,
                # so only guard the append itself.
                if ip not in self.hosts:
                    self.hosts.append(ip)

                    if not self._is_private_ip(ip):
                        self.unique_hosts.append(ip)

            if connection["dst"] not in self.hosts:
                ip = convert_to_printable(connection["dst"])
                if ip not in self.hosts:
                    self.hosts.append(ip)

                    if not self._is_private_ip(ip):
                        self.unique_hosts.append(ip)
        except:
            pass
Example #7
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files = []

        for dir_name, dir_names, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                if file_name.endswith("_info.txt"):
                    continue
                if not os.path.exists(file_path + "_info.txt"):
                    continue
                guest_paths = [line.strip() for line in open(file_path + "_info.txt")]

                file_info = File(file_path=file_path, guest_paths=guest_paths).get_all()
                if "ASCII" in file_info["type"]:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(2049)
                    if len(filedata) > 2048:
                        file_info["data"] = convert_to_printable(filedata[:2048] + " <truncated>")
                    else:
                        file_info["data"] = convert_to_printable(filedata)

                dropped_files.append(file_info)

        return dropped_files
Example #8
 def _add_hosts(self, connection):
     """
     Add IPs to unique list.
     @param connection: connection data
     """
     try:
         if connection["src"] not in self.unique_hosts:
             self.unique_hosts.append(convert_to_printable(connection["src"]))
         if connection["dst"] not in self.unique_hosts:
             self.unique_hosts.append(convert_to_printable(connection["dst"]))
     except Exception, why:
         return False
Example #9
 def _get_assembly_info(self):
     try:
         ret = dict()
         output = Popen(["/usr/bin/monodis", "--assembly", self.file_path], stdout=PIPE).stdout.read().split("\n")
         for line in output:
             if line.startswith("Name:"):
                 ret["name"] = convert_to_printable(line[5:].strip())
             if line.startswith("Version:"):
                 ret["version"] = convert_to_printable(line[8:].strip())
         return ret
     except:
         return None
Example #10
    def _parse(self, row):
        """Parse log row.
        @param row: row data.
        @return: parsed information dict.
        """
        call = {}
        arguments = []

        try:
            timestamp = row[0]    # Timestamp of current API call invocation.
            thread_id = row[1]    # Thread ID.
            category = row[2]     # Win32 function category.
            api_name = row[3]     # Name of the Windows API.
            status_value = row[4] # Success or Failure?
            return_value = row[5] # Value returned by the function.
        except IndexError as e:
            log.debug("Unable to parse process log row: %s", e)
            return None

        # Now walk through the remaining columns, which will contain API
        # arguments.
        for index in range(6, len(row)):
            argument = {}

            # Split the argument name with its value based on the separator.
            try:
                arg_name, arg_value = row[index]
            except ValueError as e:
                log.debug("Unable to parse analysis row argument (row=%s): %s", row[index], e)
                continue

            argument["name"] = arg_name

            argument["value"] = convert_to_printable(cleanup_value(arg_value))
            arguments.append(argument)

        call["timestamp"] = timestamp
        call["thread_id"] = str(thread_id)
        call["category"] = category
        call["api"] = api_name
        call["status"] = bool(int(status_value))

        if isinstance(return_value, int):
            call["return"] = "0x%.08x" % return_value
        else:
            call["return"] = convert_to_printable(cleanup_value(return_value))

        call["arguments"] = arguments
        call["repeated"] = 0

        return call
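
A hypothetical row in the format _parse expects (all column values are illustrative):

    row = [
        "2019-01-01 00:00:00,000",        # timestamp
        1234,                             # thread ID
        "filesystem",                     # category
        "NtCreateFile",                   # API name
        1,                                # status (success)
        0,                                # return value
        ("FileName", "C:\\temp\\x.txt"),  # (name, value) argument pairs follow
    ]
    # _parse(row) -> {"api": "NtCreateFile", "status": True,
    #                 "return": "0x00000000", ...}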
Example #11
 def _process_smtp(self):
     """Process SMTP flow."""
     for conn, data in self.smtp_flow.iteritems():
         # Detect new SMTP flow.
         if data.startswith(("EHLO", "HELO")):
             self.smtp_requests.append({"dst": conn, 
                                        "raw": convert_to_printable(data)})
Example #12
    def _get_imported_symbols(self):
        """Gets imported symbols.
        @return: imported symbols dict or None.
        """
        if not self.pe:
            return None

        imports = []

        if hasattr(self.pe, "DIRECTORY_ENTRY_IMPORT"):
            for entry in self.pe.DIRECTORY_ENTRY_IMPORT:
                try:
                    symbols = []
                    for imported_symbol in entry.imports:
                        symbol = {}
                        symbol["address"] = hex(imported_symbol.address)
                        symbol["name"] = imported_symbol.name
                        symbols.append(symbol)

                    imports_section = {}
                    imports_section["dll"] = convert_to_printable(entry.dll)
                    imports_section["imports"] = symbols
                    imports.append(imports_section)
                except:
                    continue

        return imports
Example #13
    def dissect(data):
        """Runs all ICMP dissectors.
        RFC 792.
        @param data: payload data of the IP protocol.
        """

        picmp = {}

        picmp["protocol_name"] = "ICMP"
        picmp["layer"] = 3
        # picmp["src"] = pip["src"]
        # picmp["dst"] = pip["dst"]
        picmp["type"] = data.type  # Type
        picmp["code"] = data.code  # Code
        picmp["checksum"] = data.sum  # Checksum

        # Extract data from dpkt.icmp.ICMP.
        try:
            picmp["data"] = convert_to_printable(data.data.data)
        except:
            picmp["data"] = ""

        return picmp
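
A hypothetical call site, assuming dpkt has already parsed the IP layer (raw_ip_bytes is a placeholder for captured packet bytes):

    import dpkt

    ip = dpkt.ip.IP(raw_ip_bytes)
    if isinstance(ip.data, dpkt.icmp.ICMP):
        picmp = dissect(ip.data)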
Example #14
    def _add_http(self, conn, tcpdata):
        """Adds an HTTP flow.
        @param conn: TCP connection info.
        @param tcpdata: TCP data flow.
        """
        if tcpdata in self.http_requests:
            self.http_requests[tcpdata]["count"] += 1
            return True

        try:
            http = dpkt.http.Request()
            http.unpack(tcpdata)
        except dpkt.dpkt.UnpackError:
            pass

        try:
            entry = {"count": 1}

            if "host" in http.headers and re.match(
                    r'^([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9])(\.([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9]))+(:[0-9]{1,5})?$',
                    http.headers["host"], re.IGNORECASE):
                entry["host"] = convert_to_printable(http.headers["host"])
            else:
                entry["host"] = conn["dst"]

            entry["port"] = conn["dport"]

            # Manually deal with cases where the destination port is not the
            # default one and is not included in the host header.
            netloc = entry["host"]
            if entry["port"] != 80 and ":" not in netloc:
                netloc += ":" + str(entry["port"])

            entry["data"] = convert_to_printable(tcpdata)
            entry["uri"] = convert_to_printable(urlunparse(("http",
                                                            netloc,
                                                            http.uri, None,
                                                            None, None)))
            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = \
                    convert_to_printable(http.headers["user-agent"])
            else:
                entry["user-agent"] = ""

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests[tcpdata] = entry
        except Exception:
            return False

        return True
Example #15
    def _get_type_refs(self):
        try:
            ret = []
            output = Popen(["/usr/bin/monodis", "--typeref", self.file_path], stdout=PIPE).stdout.read().split("\n")
            for line in output[1:]:
                restline = ''.join(line.split(":")[1:])
                restsplit = restline.split("]")
                asmname = restsplit[0][2:]
                typename = ''.join(restsplit[1:])
                if asmname and typename:
                    item = dict()
                    item["assembly"] = convert_to_printable(asmname)
                    item["typename"] = convert_to_printable(typename)
                    ret.append(item)
            return sorted(ret)

        except:
            return None
Example #16
    def dissect(irc):
        __methods_client = dict.fromkeys(("PASS", "JOIN", "USER", "OPER", "MODE", "SERVICE", "QUIT", "SQUIT",
            "PART", "TOPIC", "NAMES", "LIST", "INVITE",
            "KICK", "PRIVMSG", "NOTICE", "MOTD", "LUSERS", "VERSION", "STATS", "LINKS", "TIME", "CONNECT",
            "TRACE", "ADMIN", "INFO", "SERVLIST",
            "SQUERY", "WHO", "WHOIS", "WHOWAS", "KILL", "PING", "PONG", "ERROR", "AWAY", "REHASH", "DIE", "RESTART",
            "SUMMON", "USERS", "WALLOPS",
            "USERHOST", "NICK", "ISON"))

        _messages = []
        _sc = {}
        _cc = {}

        try:
            f = cStringIO.StringIO(irc)
            lines = f.readlines()
        except Exception:
            return False

        for element in lines:
            if re.match("^:", element) is not None:
                command = "([a-zA-Z]+|[0-9]{3})"
                params = "(\x20.+)"
                irc_server_msg = re.findall("(^:[\w+.{}!@|()]+\x20)" + command + params, element)
                if irc_server_msg:
                    _sc["prefix"] = convert_to_printable(irc_server_msg[0][0].strip())
                    _sc["command"] = convert_to_printable(irc_server_msg[0][1].strip())
                    _sc["params"] = convert_to_printable(irc_server_msg[0][2].strip())
                    _sc["type"] = "server"
                    _messages.append(dict(_sc))
            else:
                irc_client_msg = re.findall("([a-zA-Z]+\x20)(.+[\x0a\x0d])", element)
                if irc_client_msg and irc_client_msg[0][0].strip() in __methods_client:
                    _cc["command"] = convert_to_printable(irc_client_msg[0][0].strip())
                    _cc["params"] = convert_to_printable(irc_client_msg[0][1].strip())
                    _cc["type"] = "client"
                    _messages.append(dict(_cc))

        pirc = {}
        pirc["layer"] = 7
        pirc["protocol_name"] = "IRC"
        pirc["messages"] = _messages

        return pirc
Example #17
    def _get_exported_dll_name(self):
        """Gets the exported DLL name, if any.
        @return: exported DLL name as string or None.
        """
        if not self.pe:
            return None

        if hasattr(self.pe, "DIRECTORY_ENTRY_EXPORT"):
            return convert_to_printable(self.pe.get_string_at_rva(self.pe.DIRECTORY_ENTRY_EXPORT.struct.Name))
        return None
Example #18
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files = []
        buf = self.options.get("buffer", 8192)

        if self.task["category"] == "pcap":
            return dropped_files

        file_names = os.listdir(self.dropped_path)
        for file_name in file_names:
            file_path = os.path.join(self.dropped_path, file_name)
            if not os.path.isfile(file_path):
                continue
            if file_name.endswith("_info.txt"):
                continue
            if not os.path.exists(file_path + "_info.txt"):
                continue
            guest_paths = [line.strip() for line in open(file_path + "_info.txt")]
            guest_name = guest_paths[0].split("\\")[-1]
            file_info = File(file_path=file_path, guest_paths=guest_paths, file_name=guest_name).get_all()
            texttypes = [
                "ASCII",
                "Windows Registry text",
                "XML document text",
                "Unicode text",
            ]
            readit = False
            for texttype in texttypes:
                if texttype in file_info["type"]:
                    readit = True
                    break
            if readit:
                with open(file_info["path"], "r") as drop_open:
                    filedata = drop_open.read(buf + 1)
                if len(filedata) > buf:
                    file_info["data"] = convert_to_printable(filedata[:buf] + " <truncated>")
                else:
                    file_info["data"] = convert_to_printable(filedata)

            dropped_files.append(file_info)

        return dropped_files
Example #19
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files = []
        buf = self.options.get("buffer", 8192)

        for dir_name, dir_names, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                if file_name.endswith("_info.txt"):
                    continue
                if not os.path.exists(file_path + "_info.txt"):
                    continue
                guest_paths = [line.strip() for line in open(file_path + "_info.txt")]

                file_info = File(file_path=file_path, guest_paths=guest_paths).get_all()
                # Used by ElasticSearch to find the file on disk
                # since they are in random generated directories
                if Config("reporting").get("elasticsearchdb").get("enabled"):
                    file_info["dropdir"] = file_path.split("/")[-2]
                texttypes = [
                    "ASCII",
                    "Windows Registry text",
                    "XML document text",
                    "Unicode text",
                ]
                readit = False
                for texttype in texttypes:
                    if texttype in file_info["type"]:
                        readit = True
                        break
                if readit:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(buf + 1)
                    if len(filedata) > buf:
                        file_info["data"] = convert_to_printable(filedata[:buf] + " <truncated>")
                    else:
                        file_info["data"] = convert_to_printable(filedata)

                dropped_files.append(file_info)

        return dropped_files
Example #20
    def _get_pdb_path(self):
        if not self.pe:
            return None

        try:
            for dbg in self.pe.DIRECTORY_ENTRY_DEBUG:
                dbgst = dbg.struct
                dbgdata = self.pe.__data__[dbgst.PointerToRawData : dbgst.PointerToRawData + dbgst.SizeOfData]
                if dbgst.Type == 4:  # MISC
                    datatype, length, uniflag = struct.unpack_from("IIB", dbgdata)
                    return convert_to_printable(str(dbgdata[12:length]).rstrip("\0"))
                elif dbgst.Type == 2:  # CODEVIEW
                    if dbgdata[:4] == "RSDS":
                        return convert_to_printable(str(dbgdata[24:]).rstrip("\0"))
                    elif dbgdata[:4] == "NB10":
                        return convert_to_printable(str(dbgdata[16:]).rstrip("\0"))
        except:
            pass

        return None
Example #21
    def _add_http(self, tcpdata, dport):
        """Adds an HTTP flow.
        @param tcpdata: TCP data flow.
        @param dport: destination port.
        """
        if tcpdata in self.http_requests:
            self.http_requests[tcpdata]["count"] += 1
            return True

        try:
            http = dpkt.http.Request()
            http.unpack(tcpdata)
        except dpkt.dpkt.UnpackError:
            pass

        try:
            entry = {"count": 1}

            if "host" in http.headers:
                entry["host"] = convert_to_printable(http.headers["host"])
            else:
                entry["host"] = ""

            entry["port"] = dport

            # Manually deal with cases where the destination port is not the
            # default one and is not included in the host header.
            netloc = entry["host"]
            if dport != 80 and ":" not in netloc:
                netloc += ":" + str(entry["port"])

            entry["data"] = convert_to_printable(tcpdata)
            entry["uri"] = convert_to_printable(urlunparse(("http",
                                                            netloc,
                                                            http.uri, None,
                                                            None, None)))
            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = \
                    convert_to_printable(http.headers["user-agent"])

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests[tcpdata] = entry
        except Exception:
            return False

        return True
Example #22
    def _get_assembly_refs(self):
        try:
            ret = []
            output = Popen(["/usr/bin/monodis", "--assemblyref", self.file_path], stdout=PIPE).stdout.read().split("\n")
            for idx in range(len(output)):
                splitline = output[idx].split("Version=")
                if len(splitline) < 2:
                    continue
                verval = splitline[1]
                splitline = output[idx + 1].split("Name=")
                if len(splitline) < 2:
                    continue
                nameval = splitline[1]
                item = dict()
                item["name"] = convert_to_printable(nameval)
                item["version"] = convert_to_printable(verval)
                ret.append(item)
            return ret

        except:
            return None
Example #23
    def check_file(self):
        """Checks the integrity of the file to be analyzed."""
        sample = self.db.view_sample(self.task.sample_id)

        sha256 = File(self.task.target).get_sha256()
        if sha256 != sample.sha256:
            log.error(
                "Task #{0}: Target file has been modified after submission: "
                "'{1}'".format(self.task.id,
                               convert_to_printable(self.task.target)))
            return False

        return True
Example #24
    def check_file(self, sha256):
        """Checks the integrity of the file to be analyzed."""
        sample = self.db.view_sample(self.task.sample_id)

        if sha256 != sample.sha256:
            log.error(
                "Task #%s: Target file has been modified after submission: '%s'",
                self.task.id,
                convert_to_printable(self.task.target),
            )
            return False

        return True
Example #25
    def _add_http(self, tcpdata, dport):
        """Adds an HTTP flow.
        @param tcpdata: TCP data flow.
        @param dport: destination port.
        """
        if tcpdata in self.http_requests:
            self.http_requests[tcpdata]["count"] += 1
            return True

        try:
            http = dpkt.http.Request()
            http.unpack(tcpdata)
        except dpkt.dpkt.UnpackError:
            pass

        try:
            entry = {"count": 1}

            if "host" in http.headers:
                entry["host"] = convert_to_printable(http.headers["host"])
            else:
                entry["host"] = ""

            entry["port"] = dport

            # Manually deal with cases where the destination port is not the
            # default one and is not included in the host header.
            netloc = entry["host"]
            if dport != 80 and ":" not in netloc:
                netloc += ":" + str(entry["port"])

            entry["data"] = convert_to_printable(tcpdata)
            entry["uri"] = convert_to_printable(
                urlunparse(("http", netloc, http.uri, None, None, None)))
            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = \
                    convert_to_printable(http.headers["user-agent"])

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests[tcpdata] = entry
        except Exception:
            return False

        return True
Example #26
    def _get_assembly_refs(self):
        try:
            ret = []
            output = Popen(["/usr/bin/monodis", "--assemblyref", self.file_path],
                           stdout=PIPE).stdout.read().split("\n")
            for idx in range(len(output)):
                splitline = output[idx].split("Version=")
                if len(splitline) < 2:
                    continue
                verval = splitline[1]
                splitline = output[idx + 1].split("Name=")
                if len(splitline) < 2:
                    continue
                nameval = splitline[1]
                item = dict()
                item["name"] = convert_to_printable(nameval)
                item["version"] = convert_to_printable(verval)
                ret.append(item)
            return ret

        except:
            return None
Example #27
 def _get_custom_attrs(self) -> List[Dict[str, str]]:
     try:
         ret = []
         output = subprocess.check_output(["/usr/bin/monodis", "--customattr", self.file_path], universal_newlines=False).split(
             b"\n"
         )
         for line in output[1:]:
             splitline = line.decode("latin-1").split()
             if not splitline or len(splitline) < 7:
                 continue
             typeval = splitline[1].rstrip(":")
             nameval = splitline[6].split("::", 1)[0]
             if "(string)" not in splitline[6]:
                 continue
             rem = " ".join(splitline[7:])
             startidx = rem.find('["')
             if startidx < 0:
                 continue
             endidx = rem.rfind('"]')
             # also ignore empty strings
             if endidx <= 2:
                 continue
             valueval = rem[startidx + 2 : endidx - 2]
             ret.append(
                 {
                     "type": convert_to_printable(typeval),
                     "name": convert_to_printable(nameval),
                     "value": convert_to_printable(valueval),
                 }
             )
         return ret
     except UnicodeDecodeError:
         log.error("UnicodeDecodeError: /usr/bin/monodis --customattr %s", self.file_path)
     except subprocess.CalledProcessError as e:
         log.error("Monodis: %s", str(e))
     except Exception as e:
         log.error(e, exc_info=True)
         return None
Example #28
    def _unpack(self, buf):
        """ 
        Extract IRC messages from a TCP stream into a list.
        @buf: tcp stream data
        """
        try:
            f = cStringIO.StringIO(buf)
            lines = f.readlines()
        except Exception:
            log.error("Failed reading tcp stream buffer")
            return False

        for element in lines:
            if re.match("^:", element) is not None:
                command = "([a-zA-Z]+|[0-9]{3})"
                params = "(\x20.+)"
                irc_server_msg = re.findall(
                    "(^:[\w+.{}!@|()]+\x20)" + command + params, element)
                if irc_server_msg:
                    self._sc["prefix"] = convert_to_printable(
                        irc_server_msg[0][0].strip())
                    self._sc["command"] = convert_to_printable(
                        irc_server_msg[0][1].strip())
                    self._sc["params"] = convert_to_printable(
                        irc_server_msg[0][2].strip())
                    self._sc["type"] = "server"
                    self._messages.append(dict(self._sc))
            else:
                irc_client_msg = re.findall("([a-zA-Z]+\x20)(.+[\x0a\x0d])",
                                            element)
                if irc_client_msg and irc_client_msg[0][0].strip() in self.__methods_client:
                    self._cc["command"] = convert_to_printable(
                        irc_client_msg[0][0].strip())
                    self._cc["params"] = convert_to_printable(
                        irc_client_msg[0][1].strip())
                    self._cc["type"] = "client"
                    self._messages.append(dict(self._cc))
Example #29
    def _get_type_refs(self) -> List[Dict[str, str]]:
        try:
            ret = []
            output = subprocess.check_output(["/usr/bin/monodis", "--typeref", self.file_path], universal_newlines=False).split(
                b"\n"
            )
            for line in output[1:]:
                restline = "".join(line.decode("latin-1").split(":")[1:])
                restsplit = restline.split("]")
                asmname = restsplit[0][2:]
                typename = "".join(restsplit[1:])
                if asmname and typename:
                    item = {
                        "assembly": convert_to_printable(asmname),
                        "typename": convert_to_printable(typename),
                    }
                    ret.append(item)
            return ret

        except subprocess.CalledProcessError as e:
            log.error("Monodis: %s", str(e))
        except Exception as e:
            log.error(e, exc_info=True)
            return None
Example #30
    def process_call(self, call):
        """This function converts all arguments to strings to allow ES to map
        them properly."""
        if "arguments" not in call or type(call["arguments"]) != dict:
            return call

        new_arguments = {}
        for key, value in call["arguments"].iteritems():
            if type(value) is unicode or type(value) is str:
                new_arguments[key] = convert_to_printable(value)
            else:
                new_arguments[key] = str(value)

        call["arguments"] = new_arguments
        return call
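
This variant targets Python 2 (iteritems and the unicode type). On Python 3 the same loop would read, equivalently:

    for key, value in call["arguments"].items():
        # On Python 3, str covers both the former str and unicode cases.
        new_arguments[key] = convert_to_printable(value) if isinstance(value, str) else str(value)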
Example #31
    def process_call(self, call):
        """This function converts all arguments to strings to allow ES to map
        them properly."""
        if "arguments" not in call or type(call["arguments"]) != dict:
            return call

        new_arguments = {}
        for key, value in call["arguments"].iteritems():
            if type(value) is unicode or type(value) is str:
                new_arguments[key] = convert_to_printable(value)
            else:
                new_arguments[key] = str(value)

        call["arguments"] = new_arguments
        return call
Example #32
def extract_iocs(s):
    for desc, pattern in PATTERNS:
        m = pattern.findall(s)
        if m:
            # Hacked-up bugfix for multilayer Chr(Asc(Chr(Asc( which can
            # sometimes mess up our quoted string extraction / parsing.
            while "Chr(Asc(" in s:
                lastline = s
                s = re.sub(r'(?i)Chr\(Asc\((.+?)\)\)', r"\1", s)
                if s == lastline:
                    break
            # Return the line matched and not m because I prefer to have
            # context and not simply the IOC. This helps with the executable
            # file IOC, sometimes it's a save location!
            return desc, convert_to_printable(s)
    return None
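
PATTERNS is defined elsewhere in the module; a hypothetical shape it might take (the names and regexes here are purely illustrative):

    import re

    PATTERNS = (
        ("url", re.compile(r"https?://[^\s\"']+", re.IGNORECASE)),
        ("executable file", re.compile(r"[\w\-.\\]+\.exe", re.IGNORECASE)),
    )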
Example #33
def extract_iocs(s):
    for desc, pattern in PATTERNS:
        m = pattern.findall(s)
        if m:
            # Hacked-up bugfix for multilayer Chr(Asc(Chr(Asc( which can
            # sometimes mess up our quoted string extraction / parsing.
            while "Chr(Asc(" in s:
                lastline = s
                s = re.sub(r'(?i)Chr\(Asc\((.+?)\)\)', r"\1", s)
                if s == lastline:
                    break
            # Return the line matched and not m because I prefer to have
            # context and not simply the IOC. This helps with the executable
            # file IOC, sometimes it's a save location!
            return desc, convert_to_printable(s)
    return None
Example #34
def proctreetolist(tree):
    outlist = []
    if not tree:
        return outlist
    stack = deque(tree)
    while stack:
        node = stack.popleft()
        is_special = False
        if "startchildren" in node or "endchildren" in node:
            is_special = True
            outlist.append(node)
        else:
            newnode = {}
            newnode["pid"] = node["pid"]
            newnode["name"] = node["name"]
            newnode["module_path"] = node["module_path"]
            if "environ" in node and "CommandLine" in node["environ"]:
                cmdline = node["environ"]["CommandLine"]
                if cmdline.startswith("\""):
                    splitcmdline = cmdline[cmdline[1:].index("\"") +
                                           2:].split()
                    argv0 = cmdline[:cmdline[1:].index("\"") + 1].lower()
                    if node["module_path"].lower() in argv0:
                        cmdline = " ".join(splitcmdline).strip()
                    else:
                        cmdline = node["environ"]["CommandLine"]
                elif cmdline:
                    splitcmdline = cmdline.split()
                    if splitcmdline:
                        argv0 = splitcmdline[0].lower()
                        if node["module_path"].lower() in argv0:
                            cmdline = " ".join(splitcmdline[1:]).strip()
                        else:
                            cmdline = node["environ"]["CommandLine"]
                if len(cmdline) >= 200 + 15:
                    cmdline = cmdline[:200] + " ...(truncated)"
                newnode["commandline"] = convert_to_printable(cmdline)
            outlist.append(newnode)
        if is_special:
            continue
        if node["children"]:
            stack.appendleft({"endchildren": 1})
            stack.extendleft(reversed(node["children"]))
            stack.appendleft({"startchildren": 1})
    return outlist
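
An illustrative input (a hypothetical two-process tree) and the flattened output, showing the startchildren/endchildren markers (proctreetolist requires deque from collections):

    from collections import deque

    tree = [{"pid": 1, "name": "a.exe", "module_path": "C:\\a.exe",
             "children": [{"pid": 2, "name": "b.exe",
                           "module_path": "C:\\b.exe", "children": []}]}]
    # proctreetolist(tree) returns:
    # [{"pid": 1, ...}, {"startchildren": 1}, {"pid": 2, ...}, {"endchildren": 1}]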
Example #35
    def _add_hosts(self, connection):
        """Add IPs to unique list.
        @param connection: connection data
        """
        try:
            if connection["dst"] not in self.hosts:
                ip = convert_to_printable(connection["dst"])

                if ip not in self.hosts:
                    self.hosts.append(ip)

                    # We add external IPs to the list, only the first time
                    # we see them and if they're the destination of the
                    # first packet they appear in.
                    if not self._is_private_ip(ip):
                        self.unique_hosts.append(ip)
        except:
            pass
Example #36
    def _get_exported_symbols(self):
        """Gets exported symbols.
        @return: list of dicts of exported symbols or None.
        """
        if not self.pe:
            return None

        exports = []

        if hasattr(self.pe, "DIRECTORY_ENTRY_EXPORT"):
            for exported_symbol in self.pe.DIRECTORY_ENTRY_EXPORT.symbols:
                symbol = {}
                symbol["address"] = hex(self.pe.OPTIONAL_HEADER.ImageBase + exported_symbol.address)
                symbol["name"] = convert_to_printable(exported_symbol.name)
                symbol["ordinal"] = exported_symbol.ordinal
                exports.append(symbol)

        return exports
Example #37
    def _add_hosts(self, connection):
        """Add IPs to unique list.
        @param connection: connection data
        """
        try:
            if connection["dst"] not in self.hosts:
                ip = convert_to_printable(connection["dst"])

                if ip not in self.hosts:
                    self.hosts.append(ip)

                    # We add external IPs to the list, only the first time
                    # we see them and if they're the destination of the
                    # first packet they appear in.
                    if not self._is_private_ip(ip):
                        self.unique_hosts.append(ip)
        except:
            pass
Example #38
    def _get_sections(self):
        """Gets sections.
        @return: list of section dicts.
        """
        sections = []

        for entry in self.pe.sections:
            try:
                section = {}
                section["name"] = convert_to_printable(entry.Name.strip("\x00"))
                section["virtual_address"] = "0x{0:08x}".format(entry.VirtualAddress)
                section["virtual_size"] = "0x{0:08x}".format(entry.Misc_VirtualSize)
                section["size_of_data"] = "0x{0:08x}".format(entry.SizeOfRawData)
                section["entropy"] = entry.get_entropy()
                sections.append(section)
            except:
                continue

        return sections
Example #39
    def _get_exported_symbols(self):
        """Gets exported symbols.
        @return: list of dicts of exported symbols or None.
        """
        if not self.pe:
            return None

        exports = []

        if hasattr(self.pe, "DIRECTORY_ENTRY_EXPORT"):
            for exported_symbol in self.pe.DIRECTORY_ENTRY_EXPORT.symbols:
                symbol = {}
                symbol["address"] = hex(self.pe.OPTIONAL_HEADER.ImageBase +
                                        exported_symbol.address)
                symbol["name"] = convert_to_printable(exported_symbol.name)
                symbol["ordinal"] = exported_symbol.ordinal
                exports.append(symbol)

        return exports
Example #40
def proctreetolist(tree):
    stack = deque(tree)
    outlist = []
    while stack:
        node = stack.popleft()
        is_special = False
        if "startchildren" in node or "endchildren" in node:
            is_special = True
            outlist.append(node)
        else:
            newnode = {}
            newnode["pid"] = node["pid"]
            newnode["name"] = node["name"]
            newnode["module_path"] = node["module_path"]
            if "environ" in node and "CommandLine" in node["environ"]:
                cmdline = node["environ"]["CommandLine"]
                if cmdline.startswith("\""):
                    splitcmdline = cmdline[cmdline[1:].index("\"")+2:].split()
                    argv0 = cmdline[:cmdline[1:].index("\"")+1].lower()
                    if node["module_path"].lower() in argv0:
                        cmdline = " ".join(splitcmdline).strip()
                    else:
                        cmdline = node["environ"]["CommandLine"]
                elif cmdline:
                    splitcmdline = cmdline.split()
                    if splitcmdline:
                        argv0 = splitcmdline[0].lower()
                        if node["module_path"].lower() in argv0:
                            cmdline = " ".join(splitcmdline[1:]).strip()
                        else:
                            cmdline = node["environ"]["CommandLine"]
                if len(cmdline) >= 200 + 15:
                    cmdline = cmdline[:200] + " ...(truncated)"
                newnode["commandline"] = convert_to_printable(cmdline)
            outlist.append(newnode)
        if is_special:
            continue
        if node["children"]:
            stack.appendleft({"endchildren" : 1})
            stack.extendleft(reversed(node["children"]))
            stack.appendleft({"startchildren" : 1})
    return outlist
Example #41
    def _get_sections(self):
        """Gets sections.
        @return: list of section dicts.
        """
        sections = []

        for entry in self.pe.sections:
            try:
                section = {}
                section["name"] = convert_to_printable(entry.Name.strip("\x00"))
                section["virtual_address"] = "0x{0:08x}".format(entry.VirtualAddress)
                section["virtual_size"] = "0x{0:08x}".format(entry.Misc_VirtualSize)
                section["size_of_data"] = "0x{0:08x}".format(entry.SizeOfRawData)
                section["entropy"] = entry.get_entropy()
                sections.append(section)
            except:
                continue

        return sections
Example #42
    def _add_http(self, tcpdata, dport):
        """Adds an HTTP flow.
        @param tcpdata: TCP data flow.
        @param dport: destination port.
        """
        try:
            http = dpkt.http.Request()
            http.unpack(tcpdata)
        except dpkt.dpkt.UnpackError:
            pass

        try:
            entry = {}

            if "host" in http.headers:
                entry["host"] = convert_to_printable(http.headers["host"])
            else:
                entry["host"] = ""

            entry["port"] = dport
            entry["data"] = convert_to_printable(tcpdata)
            entry["uri"] = convert_to_printable(urlunparse(("http",
                                                            entry["host"],
                                                            http.uri, None,
                                                            None, None)))
            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = \
                    convert_to_printable(http.headers["user-agent"])

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests.append(entry)
        except Exception:
            return False

        return True
Example #43
    def _add_http(self, tcpdata, dport):
        """Adds an HTTP flow.
        @param tcpdata: TCP data flow.
        @param dport: destination port.
        """
        try:
            http = dpkt.http.Request()
            http.unpack(tcpdata)
        except dpkt.dpkt.UnpackError:
            pass

        try:
            entry = {}

            if "host" in http.headers:
                entry["host"] = convert_to_printable(http.headers["host"])
            else:
                entry["host"] = ""

            entry["port"] = dport
            entry["data"] = convert_to_printable(tcpdata)
            entry["uri"] = convert_to_printable(urlunparse(("http",
                                                            entry["host"],
                                                            http.uri, None,
                                                            None, None)))
            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = \
                    convert_to_printable(http.headers["user-agent"])

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests.append(entry)
        except Exception:
            return False

        return True
Example #44
    def _add_http(self, tcpdata, dport):
        """
        Adds an HTTP flow.
        @param tcpdata: TCP data in flow
        @param dport: destination port
        """  
        http = dpkt.http.Request(tcpdata)

        try:
            entry = {}

            if "host" in http.headers:
                entry["host"] = convert_to_printable(http.headers['host'])
            else:
                entry["host"] = ""

            entry["port"] = dport
            entry["data"] = convert_to_printable(tcpdata)

            if entry["port"] != 80:
                host = "%s:%d" % (entry["host"], entry["port"])
            else:
                host = entry["host"]
            entry["uri"] = convert_to_printable(urlunparse(("http", host, http.uri, None, None, None)))

            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = convert_to_printable(http.headers["user-agent"])

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests.append(entry)
        except Exception, why:
            return False

        return True
Example #45
    def _icmp_dissect(self, conn, data):
        """Runs all ICMP dissectors.
        @param conn: connection.
        @param data: payload data.
        """

        if not self._check_icmp(data):
            return

        # If ICMP packets are coming from the host, it probably isn't
        # relevant traffic, hence we can skip from reporting it.
        if conn["src"] == cfg.resultserver.ip:
            return

        entry = {"src": conn["src"], "dst": conn["dst"], "type": data.type}
        # Extract data from dpkt.icmp.ICMP.
        try:
            entry["data"] = convert_to_printable(data.data.data)
        except Exception:
            entry["data"] = ""

        self.icmp_requests.append(entry)
Example #46
    def store_file(self, sha256):
        """Store a copy of the file being analyzed."""
        if not os.path.exists(self.task.target):
            log.error(
                "Task #{0}: The file to analyze does not exist at path '{1}', "
                "analysis aborted".format(
                    self.task.id, convert_to_printable(self.task.target)))
            return False

        self.binary = os.path.join(CUCKOO_ROOT, "storage", "binaries", sha256)

        if os.path.exists(self.binary):
            log.info("Task #{0}: File already exists at '{1}'".format(
                self.task.id, self.binary))
        else:
            # TODO: do we really need to abort the analysis in case we are not
            # able to store a copy of the file?
            try:
                shutil.copy(self.task.target, self.binary)
            except (IOError, shutil.Error) as e:
                log.error(
                    "Task #{0}: Unable to store file from '{1}' to '{2}', "
                    "analysis aborted".format(self.task.id, self.task.target,
                                              self.binary))
                return False

        try:
            new_binary_path = os.path.join(self.storage, "binary")

            if hasattr(os, "symlink"):
                os.symlink(self.binary, new_binary_path)
            else:
                shutil.copy(self.binary, new_binary_path)
        except (AttributeError, OSError) as e:
            log.error("Task #{0}: Unable to create symlink/copy from '{1}' to "
                      "'{2}': {3}".format(self.task.id, self.binary,
                                          self.storage, e))

        return True
Example #47
    def _get_imported_symbols(self):
        """Gets imported symbols.
        @return: imported symbols dict or None.
        """
        imports = []

        for entry in getattr(self.pe, "DIRECTORY_ENTRY_IMPORT", []):
            try:
                symbols = []
                for imported_symbol in entry.imports:
                    symbols.append({
                        "address": hex(imported_symbol.address),
                        "name": imported_symbol.name,
                    })

                imports.append({
                    "dll": convert_to_printable(entry.dll),
                    "imports": symbols,
                })
            except:
                log.exception("Unable to parse imported symbols.")

        return imports
Example #48
 def _get_file_header(self) -> Dict[str, str]:
     return {
         # TODO
         "magic": convert_to_printable(self.elf.e_ident_raw[:4]),
         "class": describe_ei_class(self.elf.header.e_ident["EI_CLASS"]),
         "data": describe_ei_data(self.elf.header.e_ident["EI_DATA"]),
         "ei_version": describe_ei_version(self.elf.header.e_ident["EI_VERSION"]),
         "os_abi": describe_ei_osabi(self.elf.header.e_ident["EI_OSABI"]),
         "abi_version": self.elf.header.e_ident["EI_ABIVERSION"],
         "type": describe_e_type(self.elf.header["e_type"]),
         "machine": describe_e_machine(self.elf.header["e_machine"]),
         "version": describe_e_version_numeric(self.elf.header["e_version"]),
         "entry_point_address": self._print_addr(self.elf.header["e_entry"]),
         "start_of_program_headers": self.elf.header["e_phoff"],
         "start_of_section_headers": self.elf.header["e_shoff"],
         "flags": f"{self._print_addr(self.elf.header['e_flags'])}{self._decode_flags(self.elf.header['e_flags'])}",
         "size_of_this_header": self.elf.header["e_ehsize"],
         "size_of_program_headers": self.elf.header["e_phentsize"],
         "number_of_program_headers": self.elf.header["e_phnum"],
         "size_of_section_headers": self.elf.header["e_shentsize"],
         "number_of_section_headers": self.elf.header["e_shnum"],
         "section_header_string_table_index": self.elf.header["e_shstrndx"],
     }
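
The describe_* helpers above come from pyelftools; a sketch of the imports and setup this method presumably relies on (the file name is a placeholder):

    from elftools.elf.elffile import ELFFile
    from elftools.elf.descriptions import (
        describe_ei_class, describe_ei_data, describe_ei_version,
        describe_ei_osabi, describe_e_type, describe_e_machine,
        describe_e_version_numeric,
    )

    with open("sample.elf", "rb") as f:  # placeholder path
        elf = ELFFile(f)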
Example #49
    def _get_imported_symbols(self):
        """Gets imported symbols.
        @return: imported symbols dict or None.
        """
        imports = []

        if hasattr(self.pe, "DIRECTORY_ENTRY_IMPORT"):
            for entry in self.pe.DIRECTORY_ENTRY_IMPORT:
                try:
                    symbols = []
                    for imported_symbol in entry.imports:
                        symbol = {}
                        symbol["address"] = hex(imported_symbol.address)
                        symbol["name"] = imported_symbol.name
                        symbols.append(symbol)

                    imports_section = {}
                    imports_section["dll"] = convert_to_printable(entry.dll)
                    imports_section["imports"] = symbols
                    imports.append(imports_section)
                except:
                    continue

        return imports
Example #50
    def _get_sections(self):
        """Gets sections.
        @return: list of section dicts or None.
        """
        if not self.pe:
            return None

        sections = []

        for entry in self.pe.sections:
            try:
                section = {}
                section["name"] = convert_to_printable(entry.Name.strip("\x00"))
                section["virtual_address"] = "0x{0:08x}".format(entry.VirtualAddress)
                section["virtual_size"] = "0x{0:08x}".format(entry.Misc_VirtualSize)
                section["size_of_data"] = "0x{0:08x}".format(entry.SizeOfRawData)
                section["characteristics"] = self._convert_section_characteristics(entry.Characteristics)
                section["characteristics_raw"] = "0x{0:08x}".format(entry.Characteristics)
                section["entropy"] = "{0:.02f}".format(float(entry.get_entropy()))
                sections.append(section)
            except:
                continue

        return sections
Example #51
    def run(self):
        """Run Suricata.
        @return: hash with alerts
        """
        self.key = "suricata"
        # General
        SURICATA_CONF = self.options.get("conf", None)
        SURICATA_EVE_LOG = self.options.get("evelog", None)
        SURICATA_ALERT_LOG = self.options.get("alertlog", None)
        SURICATA_TLS_LOG = self.options.get("tlslog", None)
        SURICATA_HTTP_LOG = self.options.get("httplog", None)
        SURICATA_SSH_LOG = self.options.get("sshlog", None)
        SURICATA_DNS_LOG = self.options.get("dnslog", None)
        SURICATA_FILE_LOG = self.options.get("fileslog", None)
        SURICATA_FILES_DIR = self.options.get("filesdir", None)
        SURICATA_RUNMODE = self.options.get("runmode", None)
        SURICATA_FILE_BUFFER = self.options.get("buffer", 8192)
        Z7_PATH = self.options.get("7zbin", None)
        FILES_ZIP_PASS = self.options.get("zippass", None)

        # Socket
        SURICATA_SOCKET_PATH = self.options.get("socket_file", None)

        # Command Line
        SURICATA_BIN = self.options.get("bin", None)

        suricata = dict()
        suricata["alerts"] = []
        suricata["tls"] = []
        suricata["perf"] = []
        suricata["files"] = []
        suricata["http"] = []
        suricata["dns"] = []
        suricata["ssh"] = []
        suricata["fileinfo"] = []

        suricata["eve_log_full_path"] = None
        suricata["alert_log_full_path"] = None
        suricata["tls_log_full_path"] = None
        suricata["http_log_full_path"] = None
        suricata["file_log_full_path"] = None
        suricata["ssh_log_full_path"] = None
        suricata["dns_log_full_path"] = None

        tls_items = [
            "fingerprint", "issuer", "version", "subject", "sni", "ja3",
            "serial"
        ]

        SURICATA_ALERT_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                  SURICATA_ALERT_LOG)
        SURICATA_TLS_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_TLS_LOG)
        SURICATA_HTTP_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                 SURICATA_HTTP_LOG)
        SURICATA_SSH_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_SSH_LOG)
        SURICATA_DNS_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_DNS_LOG)
        SURICATA_EVE_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_EVE_LOG)
        SURICATA_FILE_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                 SURICATA_FILE_LOG)
        SURICATA_FILES_DIR_FULL_PATH = "%s/%s" % (self.logs_path,
                                                  SURICATA_FILES_DIR)

        separate_log_paths = [
            ("alert_log_full_path", SURICATA_ALERT_LOG_FULL_PATH),
            ("tls_log_full_path", SURICATA_TLS_LOG_FULL_PATH),
            ("http_log_full_path", SURICATA_HTTP_LOG_FULL_PATH),
            ("ssh_log_full_path", SURICATA_SSH_LOG_FULL_PATH),
            ("dns_log_full_path", SURICATA_DNS_LOG_FULL_PATH)
        ]

        # handle reprocessing
        all_log_paths = [x[1] for x in separate_log_paths] + \
            [SURICATA_EVE_LOG_FULL_PATH, SURICATA_FILE_LOG_FULL_PATH]
        for log_path in all_log_paths:
            if os.path.exists(log_path):
                try:
                    os.unlink(log_path)
                except:
                    pass
        if os.path.isdir(SURICATA_FILES_DIR_FULL_PATH):
            try:
                shutil.rmtree(SURICATA_FILES_DIR_FULL_PATH, ignore_errors=True)
            except:
                pass

        if not os.path.exists(SURICATA_CONF):
            log.warning(
                "Unable to Run Suricata: Conf File {} Does Not Exist".format(
                    SURICATA_CONF))
            return suricata
        if not os.path.exists(self.pcap_path):
            log.warning(
                "Unable to Run Suricata: Pcap file {} Does Not Exist".format(
                    self.pcap_path))
            return suricata

        # Add to this list if you wish to ignore any SIDs in the Suricata alert
        # logs. Useful for suppressing SIDs without disabling them, e.g. an
        # alert for a SID that depends on another ("Bad TCP data" for an
        # HTTP(S) alert).
        sid_blacklist = [
            # SURICATA FRAG IPv6 Fragmentation overlap
            2200074,
            # ET INFO InetSim Response from External Source Possible SinkHole
            2017363,
            # SURICATA UDPv4 invalid checksum
            2200075,
            # ET POLICY SSLv3 outbound connection from client vulnerable to POODLE attack
            2019416,
        ]

        if SURICATA_RUNMODE == "socket":
            try:
                #from suricatasc import SuricataSC
                from lib.cuckoo.common.suricatasc import SuricataSC
            except Exception as e:
                log.warning("Failed to import suricatasc lib {}".format(e))
                return suricata

            loopcnt = 0
            maxloops = 24
            loopsleep = 5

            args = dict()
            args["filename"] = self.pcap_path
            args["output-dir"] = self.logs_path

            suris = SuricataSC(SURICATA_SOCKET_PATH)
            try:
                suris.connect()
                suris.send_command("pcap-file", args)
            except Exception as e:
                log.warning(
                    "Failed to connect to socket and send command {}: {}".
                    format(SURICATA_SOCKET_PATH, e))
                return suricata
            while loopcnt < maxloops:
                try:
                    pcap_flist = suris.send_command("pcap-file-list")
                    current_pcap = suris.send_command("pcap-current")
                    log.debug("pcapfile list: {} current pcap: {}".format(
                        pcap_flist, current_pcap))

                    if self.pcap_path not in pcap_flist["message"]["files"] and \
                            current_pcap["message"] != self.pcap_path:
                        log.debug(
                            "Pcap is neither queued nor current; assuming it has been processed"
                        )
                        break
                    else:
                        loopcnt = loopcnt + 1
                        time.sleep(loopsleep)
                except Exception as e:
                    log.warning(
                        "Failed to get pcap status, breaking out of loop: {}".
                        format(e))
                    break

            if loopcnt == maxloops:
                logstr = "Loop timeout of {} sec occurred waiting for file {} to finish processing"
                log.warning(logstr.format(maxloops * loopsleep, current_pcap))
                return suricata
        elif SURICATA_RUNMODE == "cli":
            if not os.path.exists(SURICATA_BIN):
                log.warning(
                    "Unable to Run Suricata: Bin File {} Does Not Exist".
                    format(SURICATA_BIN))
                return suricata
            cmdstr = "{} -c {} -k none -l {} -r {}"
            cmd = cmdstr.format(SURICATA_BIN, SURICATA_CONF, self.logs_path,
                                self.pcap_path)
            ret, _, stderr = self.cmd_wrapper(cmd)
            if ret != 0:
                log.warning(
                    "Suricata returned an exit value other than zero: {}".format(
                        stderr))
                return suricata

        else:
            log.warning("Unknown Suricata Runmode")
            return suricata

        datalist = []
        if os.path.exists(SURICATA_EVE_LOG_FULL_PATH):
            suricata["eve_log_full_path"] = SURICATA_EVE_LOG_FULL_PATH
            with open(SURICATA_EVE_LOG_FULL_PATH, "rb") as eve_log:
                datalist.append(eve_log.read())
        else:
            for path in separate_log_paths:
                if os.path.exists(path[1]):
                    suricata[path[0]] = path[1]
                    with open(path[1], "rb") as the_log:
                        datalist.append(the_log.read())

        if not datalist:
            log.warning("Suricata: Failed to find usable Suricata log file")

        parsed_files = []
        for data in datalist:
            for line in data.splitlines():
                try:
                    parsed = json.loads(line)
                except:
                    log.warning(
                        "Suricata: Failed to parse line {} as json".format(
                            line))
                    continue

                if 'event_type' in parsed:
                    if parsed["event_type"] == "alert":
                        if (parsed["alert"]["signature_id"]
                                not in sid_blacklist and not parsed["alert"]
                            ["signature"].startswith("SURICATA STREAM")):
                            alog = dict()
                            if parsed["alert"]["gid"] == '':
                                alog["gid"] = "None"
                            else:
                                alog["gid"] = parsed["alert"]["gid"]
                            if parsed["alert"]["rev"] == '':
                                alog["rev"] = "None"
                            else:
                                alog["rev"] = parsed["alert"]["rev"]
                            if parsed["alert"]["severity"] == '':
                                alog["severity"] = "None"
                            else:
                                alog["severity"] = parsed["alert"]["severity"]
                            alog["sid"] = parsed["alert"]["signature_id"]
                            try:
                                alog["srcport"] = parsed["src_port"]
                            except:
                                alog["srcport"] = "None"
                            alog["srcip"] = parsed["src_ip"]
                            try:
                                alog["dstport"] = parsed["dest_port"]
                            except:
                                alog["dstport"] = "None"
                            alog["dstip"] = parsed["dest_ip"]
                            alog["protocol"] = parsed["proto"]
                            alog["timestamp"] = parsed["timestamp"].replace(
                                "T", " ")
                            if parsed["alert"]["category"] == '':
                                alog["category"] = "None"
                            else:
                                alog["category"] = parsed["alert"]["category"]
                            alog["signature"] = parsed["alert"]["signature"]
                            suricata["alerts"].append(alog)

                    elif parsed["event_type"] == "http":
                        hlog = dict()
                        hlog["srcport"] = parsed["src_port"]
                        hlog["srcip"] = parsed["src_ip"]
                        hlog["dstport"] = parsed["dest_port"]
                        hlog["dstip"] = parsed["dest_ip"]
                        hlog["timestamp"] = parsed["timestamp"].replace(
                            "T", " ")
                        keyword = ("uri", "length", "hostname", "status",
                                   "http_method", "contenttype", "ua",
                                   "referrer")
                        keyword_suri = ("url", "length", "hostname", "status",
                                        "http_method", "http_content_type",
                                        "http_user_agent", "http_refer")
                        for key, key_s in zip(keyword, keyword_suri):
                            try:
                                hlog[key] = parsed["http"].get(key_s, "None")
                            except:
                                hlog[key] = "None"
                        suricata["http"].append(hlog)

                    elif parsed["event_type"] == "tls":
                        tlog = dict()
                        tlog["srcport"] = parsed["src_port"]
                        tlog["srcip"] = parsed["src_ip"]
                        tlog["dstport"] = parsed["dest_port"]
                        tlog["dstip"] = parsed["dest_ip"]
                        tlog["timestamp"] = parsed["timestamp"].replace(
                            "T", " ")
                        for key in tls_items:
                            if key in parsed["tls"]:
                                tlog[key] = parsed["tls"][key]
                        suricata["tls"].append(tlog)

                    elif parsed["event_type"] == "ssh":
                        suricata["ssh"].append(parsed)
                    elif parsed["event_type"] == "dns":
                        suricata["dns"].append(parsed)
                    elif parsed["event_type"] == "fileinfo":
                        flog = dict()
                        flog["http_host"] = parsed.get("http",
                                                       {}).get("hostname", "")
                        flog["http_uri"] = parsed.get("http",
                                                      {}).get("url", "")
                        flog["http_referer"] = parsed.get("http", {}).get(
                            "referer", "")
                        flog["http_user_agent"] = parsed.get("http", {}).get(
                            "http_user_agent", "")
                        flog["protocol"] = parsed.get("proto", "")
                        flog["magic"] = parsed.get("fileinfo",
                                                   {}).get("magic", "")
                        flog["size"] = parsed.get("fileinfo",
                                                  {}).get("size", "")
                        flog["stored"] = parsed.get("fileinfo",
                                                    {}).get("stored", "")
                        flog["sha256"] = parsed.get("fileinfo",
                                                    {}).get("sha256", "")
                        flog["md5"] = parsed.get("fileinfo", {}).get("md5", "")
                        flog["filename"] = parsed.get("fileinfo",
                                                      {}).get("filename", "")
                        flog["file_info"] = dict()
                        if "/" in flog["filename"]:
                            flog["filename"] = flog["filename"].split("/")[-1]
                        parsed_files.append(flog)

        if parsed_files:
            for sfile in parsed_files:
                if sfile.get("stored", False):
                    filename = sfile["sha256"]
                    src_file = "{}/{}/{}".format(SURICATA_FILES_DIR_FULL_PATH,
                                                 filename[0:2], filename)
                    dst_file = "{}/{}".format(SURICATA_FILES_DIR_FULL_PATH,
                                              filename)
                    if os.path.exists(src_file):
                        try:
                            shutil.move(src_file, dst_file)
                        except OSError as e:
                            log.warning(
                                "Unable to move suricata file: {}".format(e))
                            break
                        file_info = File(file_path=dst_file).get_all()
                        try:
                            with open(file_info["path"], "r") as drop_open:
                                filedata = drop_open.read(
                                    SURICATA_FILE_BUFFER + 1)
                            if len(filedata) > SURICATA_FILE_BUFFER:
                                file_info["data"] = convert_to_printable(
                                    filedata[:SURICATA_FILE_BUFFER] +
                                    " <truncated>")
                            else:
                                file_info["data"] = convert_to_printable(
                                    filedata)
                        except UnicodeDecodeError:
                            pass
                        if file_info:
                            sfile["file_info"] = file_info
                    suricata["files"].append(sfile)
            with open(SURICATA_FILE_LOG_FULL_PATH, "w") as drop_log:
                drop_log.write(json.dumps(suricata["files"], indent=4))

            # Cleanup file subdirectories left behind by messy Suricata
            for d in [
                    dirpath
                    for (dirpath, dirnames,
                         filenames) in os.walk(SURICATA_FILES_DIR_FULL_PATH)
                    if len(dirnames) == 0 and len(filenames) == 0
            ]:
                try:
                    shutil.rmtree(d)
                except OSError as e:
                    log.warning(
                        "Unable to delete suricata file subdirectories: {}".
                        format(e))

        if SURICATA_FILES_DIR_FULL_PATH and os.path.exists(SURICATA_FILES_DIR_FULL_PATH) and Z7_PATH \
                and os.path.exists(Z7_PATH):
            # /usr/bin/7z a -pinfected -y files.zip files-json.log files
            cmdstr = "cd {} && {} a -p{} -y files.zip {} {}"
            cmd = cmdstr.format(self.logs_path, Z7_PATH, FILES_ZIP_PASS,
                                SURICATA_FILE_LOG, SURICATA_FILES_DIR)
            ret, stdout, stderr = self.cmd_wrapper(cmd)
            if ret > 1:
                log.warning(
                    "Suricata: Failed to create {}/files.zip - Error {}".
                    format(self.logs_path, ret))

        suricata["alerts"] = self.sort_by_timestamp(suricata["alerts"])
        suricata["http"] = self.sort_by_timestamp(suricata["http"])
        suricata["tls"] = self.sort_by_timestamp(suricata["tls"])

        return suricata
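To make the parsing above concrete, here is a hypothetical single eve.json alert record containing only the fields the loop actually reads:

    import json

    line = ('{"timestamp": "2019-01-01T00:00:00.000000+0000", "event_type": "alert",'
            ' "src_ip": "10.0.0.5", "src_port": 49152, "dest_ip": "93.184.216.34",'
            ' "dest_port": 80, "proto": "TCP", "alert": {"gid": 1, "rev": 2,'
            ' "severity": 2, "signature_id": 2100498, "category": "Misc activity",'
            ' "signature": "GPL ATTACK_RESPONSE id check returned root"}}')

    parsed = json.loads(line)
    assert parsed["event_type"] == "alert"
    print(parsed["alert"]["signature_id"], parsed["timestamp"].replace("T", " "))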
Exemple #52
0
    def _add_http(self, conn, tcpdata):
        """Adds an HTTP flow.
        @param conn: TCP connection info.
        @param tcpdata: TCP data flow.
        """
        if tcpdata in self.http_requests:
            self.http_requests[tcpdata]["count"] += 1
            return True

        try:
            http = dpkt.http.Request()
            http.unpack(tcpdata)
        except dpkt.dpkt.UnpackError:
            # Swallow malformed requests; parsing continues below with
            # whatever fields dpkt managed to recover before the error.
            pass

        try:
            entry = {"count": 1}

            if "host" in http.headers and re.match(
                    '^([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9])(\.([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9]))+(:[0-9]{1,5})?$',
                    http.headers["host"], re.IGNORECASE):
                entry["host"] = convert_to_printable(http.headers["host"])
            else:
                entry["host"] = conn["dst"]

            if enabled_whitelist:
                for reject in self.domain_whitelist:
                    # comment or empty line
                    if reject.startswith("#") or len(reject.strip()) == 0:
                        continue
                    if re.search(reject, entry["host"]):
                        return False

            entry["port"] = conn["dport"]

            # Manually deal with cases when the destination port is not the
            # default one and it is not included in the host header.
            netloc = entry["host"]
            if entry["port"] != 80 and ":" not in netloc:
                netloc += ":" + str(entry["port"])

            entry["data"] = convert_to_printable(tcpdata)
            entry["uri"] = convert_to_printable(
                urlunparse(("http", netloc, http.uri, None, None, None)))
            entry["body"] = convert_to_printable(http.body)
            entry["path"] = convert_to_printable(http.uri)

            if "user-agent" in http.headers:
                entry["user-agent"] = \
                    convert_to_printable(http.headers["user-agent"])
            else:
                entry["user-agent"] = ""

            entry["version"] = convert_to_printable(http.version)
            entry["method"] = convert_to_printable(http.method)

            self.http_requests[tcpdata] = entry
        except Exception:
            return False

        return True
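A minimal sketch of the dpkt call this method relies on, run against a hypothetical raw request payload:

    import dpkt

    tcpdata = b"GET /index.html HTTP/1.1\r\nHost: example.com\r\nUser-Agent: test\r\n\r\n"
    http = dpkt.http.Request()
    try:
        http.unpack(tcpdata)
    except dpkt.dpkt.UnpackError:
        http = None  # not a parseable HTTP request
    if http:
        # dpkt lower-cases header names on parse.
        print(http.method, http.uri, http.version, http.headers.get("host"))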
Exemple #53
0
    def _parse(self, row):
        """Parse log row.
        @param row: row data.
        @return: parsed information dict.
        """
        arguments = []

        try:
            timestamp = row[0]  # Timestamp of the current API call invocation.
            thread_id = row[1]  # Thread ID.
            caller = row[2]  # Non-system DLL return address.
            parentcaller = row[3]  # Non-system DLL parent of the non-system DLL return address.
            category = row[4]  # Win32 function category.
            api_name = row[5]  # Name of the Windows API.
            repeated = row[6]  # Number of times the log entry was repeated.
            status_value = row[7]  # Success or failure.
            return_value = row[8]  # Value returned by the function.
        except IndexError as e:
            log.debug("Unable to parse process log row: %s", e)
            return None

        # Now walk through the remaining columns, which will contain API
        # arguments.
        for api_arg in row[9:]:
            # Unpack the argument name and its value from the (name, value) pair.
            try:
                arg_name, arg_value = api_arg
            except ValueError as e:
                log.debug("Unable to parse analysis row argument (row=%s): %s", api_arg, e)
                continue

            argument = {"name": arg_name}
            if isinstance(arg_value, bytes):
                arg_value = bytes2str(arg_value)

            if arg_value and isinstance(arg_value, list) and len(arg_value) >= 1 and isinstance(arg_value[0], bytes):
                arg_value = " ".join(bytes2str(arg_value))

            try:
                argument["value"] = convert_to_printable(arg_value, self.conversion_cache)
            except Exception as e:
                log.error(arg_value, exc_info=True)
                continue
            if not self.reporting_mode:
                argument["raw_value"] = arg_value
            pretty = pretty_print_arg(category, api_name, arg_name, argument["value"])
            if pretty:
                argument["pretty_value"] = pretty
            arguments.append(argument)

        call = {
            "timestamp": timestamp,
            "thread_id": str(thread_id),
            "caller": f"0x{default_converter(caller):08x}",
            "parentcaller": f"0x{default_converter(parentcaller):08x}",
            "category": category,
            "api": api_name,
            "status": bool(int(status_value)),
        }

        if isinstance(return_value, int):
            call["return"] = f"0x{default_converter(return_value):08x}"
        else:
            call["return"] = convert_to_printable(str(return_value), self.conversion_cache)

        prettyret = pretty_print_retval(call["status"], call["return"])
        if prettyret:
            call["pretty_return"] = prettyret

        call["arguments"] = arguments
        call["repeated"] = repeated

        # add the thread id to our thread set
        if call["thread_id"] not in self.threads:
            self.threads.append(call["thread_id"])

        return call
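A hypothetical row in the layout _parse expects, for illustration only: nine fixed columns followed by (name, value) argument pairs.

    row = [
        "2019-01-01 00:00:00,500",    # timestamp
        2044,                         # thread_id
        0x401000,                     # caller
        0x400000,                     # parentcaller
        "filesystem",                 # category
        "NtCreateFile",               # api_name
        0,                            # repeated
        1,                            # status_value
        0,                            # return_value
        ("FileName", "C:\\test.txt"),
        ("DesiredAccess", "0x80100080"),
    ]
    # self._parse(row) would yield a call dict with "api": "NtCreateFile",
    # "status": True, and two entries under "arguments".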
Exemple #54
0
    def launch_analysis(self):
        """Start analysis."""
        succeeded = False
        dead_machine = False
        self.socks5s = _load_socks5_operational()

        log.info("Task #{0}: Starting analysis of {1} '{2}'".format(
            self.task.id, self.task.category.upper(),
            convert_to_printable(self.task.target)))

        # Initialize the analysis folders.
        if not self.init_storage():
            log.debug("Failed to initialize the analysis folder")
            return False

        if self.task.category in ["file", "pcap", "static"]:
            sha256 = File(self.task.target).get_sha256()
            # Check whether the file has been changed for some unknown reason,
            # and fail the analysis if it has been modified.
            if not self.check_file(sha256):
                return False

            # Store a copy of the original file.
            if not self.store_file(sha256):
                return False

        if self.task.category in ("pcap", "static"):
            if self.task.category == "pcap":
                if hasattr(os, "symlink"):
                    os.symlink(self.binary,
                               os.path.join(self.storage, "dump.pcap"))
                else:
                    shutil.copy(self.binary,
                                os.path.join(self.storage, "dump.pcap"))
            # Create the logs/files directories, as the resultserver
            # would normally do it.
            dirnames = ["logs", "files", "aux"]
            for dirname in dirnames:
                try:
                    os.makedirs(os.path.join(self.storage, dirname))
                except:
                    pass
            return True

        # Acquire analysis machine.
        try:
            self.acquire_machine()
            self.db.set_task_vm(self.task.id, self.machine.label,
                                self.machine.id)
        # At this point we can tell the ResultServer about it.
        except CuckooOperationalError as e:
            machine_lock.release()
            log.error("Task #{0}: Cannot acquire machine: {1}".format(
                self.task.id, e),
                      exc_info=True)
            return False

        # Generate the analysis configuration file.
        options = self.build_options()

        try:
            ResultServer().add_task(self.task, self.machine)
        except Exception as e:
            machinery.release(self.machine.label)
            log.exception(e, exc_info=True)
            self.errors.put(e)

        aux = RunAuxiliary(task=self.task, machine=self.machine)

        try:
            unlocked = False

            # Mark the selected analysis machine in the database as started.
            guest_log = self.db.guest_start(self.task.id, self.machine.name,
                                            self.machine.label,
                                            machinery.__class__.__name__)
            # Start the machine.
            machinery.start(self.machine.label)

            # Enable network routing.
            self.route_network()

            # By the time start returns it will have fully started the Virtual
            # Machine. We can now safely release the machine lock.
            machine_lock.release()
            unlocked = True

            aux.start()

            # Initialize the guest manager.
            guest = GuestManager(self.machine.name, self.machine.ip,
                                 self.machine.platform, self.task.id, self)

            options["clock"] = self.db.update_clock(self.task.id)
            self.db.guest_set_status(self.task.id, "starting")
            # Start the analysis.
            guest.start_analysis(options)
            if self.db.guest_get_status(self.task.id) == "starting":
                self.db.guest_set_status(self.task.id, "running")
                guest.wait_for_completion()

            self.db.guest_set_status(self.task.id, "stopping")
            succeeded = True
        except CuckooMachineError as e:
            if not unlocked:
                machine_lock.release()
            log.error(str(e), extra={"task_id": self.task.id}, exc_info=True)
            dead_machine = True
        except CuckooGuestError as e:
            if not unlocked:
                machine_lock.release()
            log.error(str(e), extra={"task_id": self.task.id}, exc_info=True)
        finally:
            # Stop Auxiliary modules.
            aux.stop()

            # Take a memory dump of the machine before shutting it off.
            if self.cfg.cuckoo.memory_dump or self.task.memory:
                try:
                    dump_path = get_memdump_path(self.task.id)
                    need_space, space_available = free_space_monitor(
                        os.path.dirname(dump_path), return_value=True)
                    if need_space:
                        log.error(
                            "Not enough free disk space! Could not dump RAM (only %d MB available)",
                            space_available)
                    else:
                        machinery.dump_memory(self.machine.label, dump_path)
                except NotImplementedError:
                    log.error("The memory dump functionality is not available "
                              "for the current machine manager.")

                except CuckooMachineError as e:
                    log.error(e, exc_info=True)

            try:
                # Stop the analysis machine.
                machinery.stop(self.machine.label)

            except CuckooMachineError as e:
                log.warning(
                    "Task #{0}: Unable to stop machine {1}: {2}".format(
                        self.task.id, self.machine.label, e))

            # Mark the machine in the database as stopped. Unless this machine
            # has been marked as dead, we just keep it as "started" in the
            # database so it'll not be used later on in this session.
            self.db.guest_stop(guest_log)

            # After all this, we can make the ResultServer forget about the
            # internal state for this analysis task.
            ResultServer().del_task(self.task, self.machine)

            # Drop the network routing rules if any.
            self.unroute_network()

            if dead_machine:
                # Remove the guest from the database, so that we can assign a
                # new guest when the task is being analyzed with another
                # machine.
                self.db.guest_remove(guest_log)

                # Remove the analysis directory that has been created so
                # far, as launch_analysis() is going to be doing that again.
                shutil.rmtree(self.storage)

                # This machine has turned dead, so we throw an exception here
                # which informs the AnalysisManager that it should analyze
                # this task again with another available machine.
                raise CuckooDeadMachine()

            try:
                # Release the analysis machine. But only if the machine has
                # not turned dead yet.
                machinery.release(self.machine.label)

            except CuckooMachineError as e:
                log.error("Task #{0}: Unable to release machine {1}, reason "
                          "{2}. You might need to restore it manually.".format(
                              self.task.id, self.machine.label, e))

        return succeeded
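A hedged sketch of the retry contract the final comments describe — the caller (here a hypothetical analysis_manager loop; name assumed, not from the source) treats CuckooDeadMachine as "pick another machine and try again":

    while True:
        try:
            succeeded = analysis_manager.launch_analysis()
            break
        except CuckooDeadMachine:
            # The guest died; launch_analysis() already removed the guest
            # record and the analysis directory, so retrying is safe.
            continue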
Exemple #55
0
    def run(self):
        """Run analysis.
        @return: list of process dumps with related information.
        """
        self.key = "procdump"
        procdump_files = []
        buf = self.options.get("buffer", 8192)
        if not hasattr(self, "procdump_path") or not os.path.exists(
                self.procdump_path):
            return None
        file_names = os.listdir(self.procdump_path)
        for file_name in file_names:
            file_path = os.path.join(self.procdump_path, file_name)
            if not os.path.isfile(file_path):
                continue
            if file_name.endswith("_info.txt"):
                continue
            with open(file_path + "_info.txt", 'r') as f:
                metastring = f.readline()
            file_info = File(file_path=file_path,
                             guest_paths=metastring,
                             file_name=file_name).get_all()
            metastrings = metastring.split(",")
            file_info["process_path"] = metastrings[2]
            file_info["module_path"] = metastrings[3]
            file_info["process_name"] = file_info["process_path"].split(
                "\\")[-1]
            file_info["pid"] = metastrings[1]
            file_info["cape_type"] = "PE image"
            type_strings = file_info["type"].split()
            if type_strings[0] == "MS-DOS":
                file_info["cape_type"] = "PE image: MS-DOS"
            else:
                if type_strings[0] == ("PE32+"):
                    file_info["cape_type"] += ": 64-bit "
                elif type_strings[0] == ("PE32"):
                    file_info["cape_type"] += ": 32-bit "
                if type_strings[2] == ("(DLL)"):
                    file_info["cape_type"] += "DLL"
                else:
                    file_info["cape_type"] += "executable"
            texttypes = [
                "ASCII",
                "Windows Registry text",
                "XML document text",
                "Unicode text",
            ]
            readit = False
            for texttype in texttypes:
                if texttype in file_info["type"]:
                    readit = True
                    break
            if readit:
                with open(file_info["path"], "r") as drop_open:
                    filedata = drop_open.read(buf + 1)
                if len(filedata) > buf:
                    file_info["data"] = convert_to_printable(filedata[:buf] +
                                                             " <truncated>")
                else:
                    file_info["data"] = convert_to_printable(filedata)

            procdump_files.append(file_info)

        return procdump_files
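The _info.txt metadata line is consumed purely by position; a hypothetical example matching the indexing above (column 0 is unused here, column 1 is the PID, columns 2 and 3 are the process and module paths):

    metastring = "640,2044,C:\\Windows\\explorer.exe,C:\\Windows\\explorer.exe"
    metastrings = metastring.split(",")
    pid, process_path, module_path = metastrings[1], metastrings[2], metastrings[3]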
Exemple #56
0
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files, meta = [], {}
        buf = self.options.get("buffer", 8192)

        if self.task["category"] in ("pcap", "static"):
            return dropped_files

        if not os.path.exists(self.dropped_path):
            return dropped_files

        if os.path.exists(self.files_metadata):
            for line in open(self.files_metadata, "rb"):
                entry = json.loads(line)
                filepath = os.path.join(self.analysis_path, entry["path"])
                meta.setdefault(filepath, []).append({
                    "pids":
                    entry["pids"],
                    "filepath":
                    entry["filepath"],
                })

        for dir_name, _, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"],
                                                       pefile_object)
                # meta maps the dump path to a list of metadata records;
                # merge the first one (pids, original guest filepath) if any.
                file_info.update(meta.get(file_info["path"], [{}])[0])
                if file_path in meta:
                    guest_paths = list(
                        set([path.get("filepath")
                             for path in meta[file_path]]))
                    guest_names = list(
                        set([
                            path.get("filepath", "").rsplit("\\", 1)[-1]
                            for path in meta[file_path]
                        ]))
                else:
                    guest_paths = []
                    guest_names = []

                file_info["guest_paths"] = guest_paths if isinstance(
                    guest_paths, list) else [guest_paths]
                file_info["name"] = guest_names

                try:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(buf + 1)

                    filedata = wide2str(filedata)
                    if len(filedata) > buf:
                        file_info["data"] = convert_to_printable(
                            f"{filedata[:buf]} <truncated>")
                    else:
                        file_info["data"] = convert_to_printable(filedata)
                except UnicodeDecodeError:
                    pass
                dropped_files.append(file_info)

        for dir_name, _, file_names in os.walk(self.package_files):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"],
                                                       pefile_object)

                # Run any applicable file extractors/unpackers.
                generic_file_extractors(file_path, self.dropped_path,
                                        file_info.get("type", ""), file_info)

                dropped_files.append(file_info)

        return dropped_files
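A hypothetical files.json metadata line in the shape the loop above expects — a relative dump path plus the guest-side origin of the file:

    import json

    line = b'{"path": "files/0123456789abcdef", "pids": [2044], "filepath": "C:\\\\Users\\\\user\\\\evil.exe"}'
    entry = json.loads(line)
    guest_name = entry["filepath"].rsplit("\\", 1)[-1]  # -> "evil.exe"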
Exemple #57
0
    def run(self):
        commands = [
            "bypass",
            "unrestricted",
            "YnlwYXNz",
            "J5cGFzc",
            "ieXBhc3",
            "dW5yZXN0cmljdGVk",
            "VucmVzdHJpY3RlZ",
            "1bnJlc3RyaWN0ZW",
            "-nop",
            "/nop",
            "-e ",
            "/e ",
            "-en ",
            "/en ",
            "-enc",
            "/enc",
            "-noni",
            "/noni",
            "start-process",
            "downloadfile(",
            "ZG93bmxvYWRmaWxlK",
            "Rvd25sb2FkZmlsZS",
            "kb3dubG9hZGZpbGUo",
            "net.webrequest",
            "start-bitstransfer",
            "invoke-item",
            "frombase64string(",
            "convertto-securestring",
            "securestringtoglobalallocunicode",
            "downloadstring(",
            "shellexecute(",
            "downloaddata(",
        ]

        ret = False
        cmdlines = self.results["behavior"]["summary"]["executed_commands"]
        for cmdline in cmdlines:
            lower = cmdline.lower()
            if "powershell" in lower:
                for command in commands:
                    if command in lower:
                        ret = True
                        self.data.append({"command": cmdline})
                        break
                if ("-w" in lower or "/w" in lower) and "hidden" in lower:
                    ret = True
                    self.data.append({"command": cmdline})

                # Decode base64 strings for reporting; detection matches
                # against the decoded content can be added later. We do not
                # handle the case where a variable holds the payload (e.g.
                # "$encoded = BASE64_CONTENT; powershell -enc $encoded"), so
                # evading the decoding is possible. Alternatively we could
                # hunt for any base64 content in PowerShell command lines,
                # but that would need testing.
                if "-e " in lower or "/e " in lower or "-en " in lower or "/en " in lower or "-enc" in lower or "/enc" in lower:
                    b64strings = re.findall(
                        r'[-\/][eE][nNcCoOdDeEmMaA]{0,13}\ (\S+)', cmdline)
                    for b64string in b64strings:
                        b64 = True
                        encoded = str(b64string)
                        try:
                            base64.b64decode(encoded)
                        except binascii.Error:
                            b64 = False
                        if b64:
                            decoded = base64.b64decode(encoded)
                            if "\x00" in decoded:
                                decoded = base64.b64decode(encoded).decode(
                                    'UTF-16')
                            self.data.append({
                                "decoded_base64_string":
                                convert_to_printable(decoded)
                            })

                if "frombase64string(" in lower:
                    b64strings = re.findall(
                        r'[fF][rR][oO][mM][bB][aA][sS][eE]64[sS][tT][rR][iI][nN][gG]\([\"\'](\S+)[\"\']\)',
                        cmdline)
                    for b64string in b64strings:
                        b64 = True
                        encoded = str(b64string)
                        try:
                            base64.b64decode(encoded)
                        except binascii.Error:
                            b64 = False
                        if b64:
                            decoded = base64.b64decode(encoded)
                            if "\x00" in decoded:
                                decoded = base64.b64decode(encoded).decode(
                                    'UTF-16')
                            self.data.append({
                                "decoded_base64_string":
                                convert_to_printable(decoded)
                            })

        return ret
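A worked example of the -enc handling above: PowerShell's -EncodedCommand takes base64 over UTF-16LE text, which is why the code checks for NUL bytes and falls back to a UTF-16 decode (shown here with Python 3 bytes semantics):

    import base64

    encoded = base64.b64encode("Write-Host pwned".encode("utf-16-le")).decode()
    decoded = base64.b64decode(encoded)
    if b"\x00" in decoded:
        decoded = decoded.decode("utf-16")
    print(decoded)  # -> Write-Host pwned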
Exemple #58
0
    def run(self):
        """Run Suricata.
        @return: dict with alerts and other parsed Suricata logs.
        """
        self.key = "suricata"
        # General
        SURICATA_CONF = self.options.get("conf", None)
        SURICATA_EVE_LOG = self.options.get("evelog", None)
        SURICATA_ALERT_LOG = self.options.get("alertlog", None)
        SURICATA_TLS_LOG = self.options.get("tlslog", None)
        SURICATA_HTTP_LOG = self.options.get("httplog", None)
        SURICATA_SSH_LOG = self.options.get("sshlog", None)
        SURICATA_DNS_LOG = self.options.get("dnslog", None)
        SURICATA_FILE_LOG = self.options.get("fileslog", None)
        SURICATA_FILES_DIR = self.options.get("filesdir", None)
        SURICATA_RUNMODE = self.options.get("runmode", None)
        SURICATA_FILE_BUFFER = self.options.get("buffer", 8192)
        Z7_PATH = self.options.get("7zbin", None)
        FILES_ZIP_PASS = self.options.get("zippass", None)
        SURICATA_FILE_COPY_DST_DIR = self.options.get("file_copy_dest_dir",
                                                      None)
        SURICATA_FILE_COPY_MAGIC_RE = self.options.get("file_magic_re", None)
        if SURICATA_FILE_COPY_MAGIC_RE:
            try:
                SURICATA_FILE_COPY_MAGIC_RE = re.compile(
                    SURICATA_FILE_COPY_MAGIC_RE)
            except:
                log.warning("Failed to compile suricata copy magic RE" %
                            (SURICATA_FILE_COPY_MAGIC_RE))
                SURICATA_FILE_COPY_MAGIC_RE = None
        # Socket
        SURICATA_SOCKET_PATH = self.options.get("socket_file", None)
        SURICATA_SOCKET_PYLIB = self.options.get("pylib_dir", None)

        # Command Line
        SURICATA_BIN = self.options.get("bin", None)

        suricata = {}
        suricata["alerts"] = []
        suricata["tls"] = []
        suricata["perf"] = []
        suricata["files"] = []
        suricata["http"] = []
        suricata["dns"] = []
        suricata["ssh"] = []
        suricata["file_info"] = []

        suricata["eve_log_full_path"] = None
        suricata["alert_log_full_path"] = None
        suricata["tls_log_full_path"] = None
        suricata["http_log_full_path"] = None
        suricata["file_log_full_path"] = None
        suricata["ssh_log_full_path"] = None
        suricata["dns_log_full_path"] = None

        SURICATA_ALERT_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                  SURICATA_ALERT_LOG)
        SURICATA_TLS_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_TLS_LOG)
        SURICATA_HTTP_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                 SURICATA_HTTP_LOG)
        SURICATA_SSH_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_SSH_LOG)
        SURICATA_DNS_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_DNS_LOG)
        SURICATA_EVE_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                SURICATA_EVE_LOG)
        SURICATA_FILE_LOG_FULL_PATH = "%s/%s" % (self.logs_path,
                                                 SURICATA_FILE_LOG)
        SURICATA_FILES_DIR_FULL_PATH = "%s/%s" % (self.logs_path,
                                                  SURICATA_FILES_DIR)

        separate_log_paths = [
            ("alert_log_full_path", SURICATA_ALERT_LOG_FULL_PATH),
            ("tls_log_full_path", SURICATA_TLS_LOG_FULL_PATH),
            ("http_log_full_path", SURICATA_HTTP_LOG_FULL_PATH),
            ("ssh_log_full_path", SURICATA_SSH_LOG_FULL_PATH),
            ("dns_log_full_path", SURICATA_DNS_LOG_FULL_PATH)
        ]

        # handle reprocessing
        all_log_paths = [x[1] for x in separate_log_paths] + \
            [SURICATA_EVE_LOG_FULL_PATH, SURICATA_FILE_LOG_FULL_PATH]
        for log_path in all_log_paths:
            if os.path.exists(log_path):
                try:
                    os.unlink(log_path)
                except:
                    pass
        if os.path.isdir(SURICATA_FILES_DIR_FULL_PATH):
            try:
                shutil.rmtree(SURICATA_FILES_DIR_FULL_PATH, ignore_errors=True)
            except:
                pass

        if not os.path.exists(SURICATA_CONF):
            log.warning("Unable to Run Suricata: Conf File %s Does Not Exist" %
                        (SURICATA_CONF))
            return suricata
        if not os.path.exists(self.pcap_path):
            log.warning("Unable to Run Suricata: Pcap file %s Does Not Exist" %
                        (self.pcap_path))
            return suricata

        # Add to this list if you wish to ignore any SIDs in the Suricata alert
        # logs. Useful for suppressing SIDs without disabling them, e.g. an
        # alert for a SID that depends on another ("Bad TCP data" for an
        # HTTP(S) alert).
        sid_blacklist = [
            # SURICATA FRAG IPv6 Fragmentation overlap
            2200074,
            # ET INFO InetSim Response from External Source Possible SinkHole
            2017363,
            # SURICATA UDPv4 invalid checksum
            2200075,
            # ET POLICY SSLv3 outbound connection from client vulnerable to POODLE attack
            2019416,
        ]

        if SURICATA_RUNMODE == "socket":
            if SURICATA_SOCKET_PYLIB is not None:
                sys.path.append(SURICATA_SOCKET_PYLIB)
            try:
                from suricatasc import SuricataSC
            except Exception as e:
                log.warning("Failed to import suricatasc lib %s" % (e))
                return suricata

            loopcnt = 0
            maxloops = 24
            loopsleep = 5

            args = {}
            args["filename"] = self.pcap_path
            args["output-dir"] = self.logs_path

            suris = SuricataSC(SURICATA_SOCKET_PATH)
            try:
                suris.connect()
                suris.send_command("pcap-file", args)
            except Exception as e:
                log.warning(
                    "Failed to connect to socket and send command %s: %s" %
                    (SURICATA_SOCKET_PATH, e))
                return suricata
            while loopcnt < maxloops:
                try:
                    pcap_flist = suris.send_command("pcap-file-list")
                    current_pcap = suris.send_command("pcap-current")
                    log.debug("pcapfile list: %s current pcap: %s" %
                              (pcap_flist, current_pcap))

                    if self.pcap_path not in pcap_flist["message"][
                            "files"] and current_pcap[
                                "message"] != self.pcap_path:
                        log.debug(
                            "Pcap is neither queued nor current; assuming it has been processed"
                        )
                        break
                    else:
                        loopcnt = loopcnt + 1
                        time.sleep(loopsleep)
                except Exception as e:
                    log.warning(
                        "Failed to get pcap status, breaking out of loop: %s" %
                        (e))
                    break

            if loopcnt == maxloops:
                log.warning(
                    "Loop timeout of %s sec occurred waiting for file %s to finish processing"
                    % (maxloops * loopsleep, self.pcap_path))
                return suricata
        elif SURICATA_RUNMODE == "cli":
            if not os.path.exists(SURICATA_BIN):
                log.warning(
                    "Unable to Run Suricata: Bin File %s Does Not Exist" %
                    (SURICATA_BIN))
                return suricata
            cmd = "%s -c %s -k none -l %s -r %s" % (
                SURICATA_BIN, SURICATA_CONF, self.logs_path, self.pcap_path)
            ret, stdout, stderr = self.cmd_wrapper(cmd)
            if ret != 0:
                log.warning(
                    "Suricata returned an exit value other than zero: %s" %
                    (stderr))
                return suricata

        else:
            log.warning("Unknown Suricata Runmode")
            return suricata

        datalist = []
        if os.path.exists(SURICATA_EVE_LOG_FULL_PATH):
            suricata["eve_log_full_path"] = SURICATA_EVE_LOG_FULL_PATH
            with open(SURICATA_EVE_LOG_FULL_PATH, "rb") as eve_log:
                datalist.append(eve_log.read())
        else:
            for path in separate_log_paths:
                if os.path.exists(path[1]):
                    suricata[path[0]] = path[1]
                    with open(path[1], "rb") as the_log:
                        datalist.append(the_log.read())

        if not datalist:
            log.warning("Suricata: Failed to find usable Suricata log file")

        for data in datalist:
            for line in data.splitlines():
                try:
                    parsed = json.loads(line)
                except:
                    log.warning("Suricata: Failed to parse line as json" %
                                (line))
                    continue

                if 'event_type' in parsed:
                    if parsed["event_type"] == "alert":
                        if (parsed["alert"]["signature_id"]
                                not in sid_blacklist and not parsed["alert"]
                            ["signature"].startswith("SURICATA STREAM")):
                            alog = dict()
                            if parsed["alert"]["gid"] == '':
                                alog["gid"] = "None"
                            else:
                                alog["gid"] = parsed["alert"]["gid"]
                            if parsed["alert"]["rev"] == '':
                                alog["rev"] = "None"
                            else:
                                alog["rev"] = parsed["alert"]["rev"]
                            if parsed["alert"]["severity"] == '':
                                alog["severity"] = "None"
                            else:
                                alog["severity"] = parsed["alert"]["severity"]
                            alog["sid"] = parsed["alert"]["signature_id"]
                            try:
                                alog["srcport"] = parsed["src_port"]
                            except:
                                alog["srcport"] = "None"
                            alog["srcip"] = parsed["src_ip"]
                            try:
                                alog["dstport"] = parsed["dest_port"]
                            except:
                                alog["dstport"] = "None"
                            alog["dstip"] = parsed["dest_ip"]
                            alog["protocol"] = parsed["proto"]
                            alog["timestamp"] = parsed["timestamp"].replace(
                                "T", " ")
                            if parsed["alert"]["category"] == '':
                                alog["category"] = "None"
                            else:
                                alog["category"] = parsed["alert"]["category"]
                            alog["signature"] = parsed["alert"]["signature"]
                            suricata["alerts"].append(alog)

                    elif parsed["event_type"] == "http":
                        hlog = dict()
                        hlog["srcport"] = parsed["src_port"]
                        hlog["srcip"] = parsed["src_ip"]
                        hlog["dstport"] = parsed["dest_port"]
                        hlog["dstip"] = parsed["dest_ip"]
                        hlog["timestamp"] = parsed["timestamp"].replace(
                            "T", " ")
                        try:
                            hlog["uri"] = parsed["http"]["url"]
                        except:
                            hlog["uri"] = "None"
                        hlog["length"] = parsed["http"]["length"]
                        try:
                            hlog["hostname"] = parsed["http"]["hostname"]
                        except:
                            hlog["hostname"] = "None"
                        try:
                            hlog["status"] = str(parsed["http"]["status"])
                        except:
                            hlog["status"] = "None"
                        try:
                            hlog["method"] = parsed["http"]["http_method"]
                        except:
                            hlog["method"] = "None"
                        try:
                            hlog["contenttype"] = parsed["http"][
                                "http_content_type"]
                        except:
                            hlog["contenttype"] = "None"
                        try:
                            hlog["ua"] = parsed["http"]["http_user_agent"]
                        except:
                            hlog["ua"] = "None"
                        try:
                            hlog["referrer"] = parsed["http"]["http_refer"]
                        except:
                            hlog["referrer"] = "None"
                        suricata["http"].append(hlog)

                    elif parsed["event_type"] == "tls":
                        tlog = dict()
                        tlog["srcport"] = parsed["src_port"]
                        tlog["srcip"] = parsed["src_ip"]
                        tlog["dstport"] = parsed["dest_port"]
                        tlog["dstip"] = parsed["dest_ip"]
                        tlog["timestamp"] = parsed["timestamp"].replace(
                            "T", " ")
                        tlog["fingerprint"] = parsed["tls"]["fingerprint"]
                        tlog["issuer"] = parsed["tls"]["issuerdn"]
                        tlog["version"] = parsed["tls"]["version"]
                        tlog["subject"] = parsed["tls"]["subject"]
                        suricata["tls"].append(tlog)

                    elif parsed["event_type"] == "ssh":
                        suricata["ssh"].append(parsed)
                    elif parsed["event_type"] == "dns":
                        suricata["dns"].append(parsed)

        if os.path.exists(SURICATA_FILE_LOG_FULL_PATH):
            suricata["file_log_full_path"] = SURICATA_FILE_LOG_FULL_PATH
            with open(SURICATA_FILE_LOG_FULL_PATH, "rb") as file_log:
                lines = file_log.readlines()
            for l in lines:
                try:
                    d = json.loads(l)
                except:
                    log.warning("failed to load JSON from file log")
                    continue
                # Some log entries do not have an id
                if "id" not in d:
                    continue
                src_file = "%s/file.%s" % (SURICATA_FILES_DIR_FULL_PATH,
                                           d["id"])
                if os.path.exists(src_file):
                    if SURICATA_FILE_COPY_MAGIC_RE and SURICATA_FILE_COPY_DST_DIR and os.path.exists(
                            SURICATA_FILE_COPY_DST_DIR):
                        try:
                            m = re.search(SURICATA_FILE_COPY_MAGIC_RE,
                                          d["magic"])
                            if m:
                                dst_file = "%s/%s" % (
                                    SURICATA_FILE_COPY_DST_DIR, d["md5"])
                                shutil.copy2(src_file, dst_file)
                                log.warning("copied %s to %s" %
                                            (src_file, dst_file))
                        except Exception as e:
                            log.warning("Unable to copy suricata file: %s" % e)
                    file_info = File(file_path=src_file).get_all()
                    texttypes = [
                        "ASCII",
                        "Windows Registry text",
                        "XML document text",
                        "Unicode text",
                    ]
                    readit = False
                    for texttype in texttypes:
                        if texttype in file_info["type"]:
                            readit = True
                            break
                    if readit:
                        with open(file_info["path"], "rb") as drop_open:
                            filedata = drop_open.read(SURICATA_FILE_BUFFER + 1)
                        if len(filedata) > SURICATA_FILE_BUFFER:
                            file_info["data"] = convert_to_printable(
                                filedata[:SURICATA_FILE_BUFFER] +
                                " <truncated>")
                        else:
                            file_info["data"] = convert_to_printable(filedata)
                    d["file_info"] = file_info
                if "/" in d["filename"]:
                    d["filename"] = d["filename"].split("/")[-1]
                suricata["files"].append(d)
Exemple #59
0
    def _process_smtp(self):
        """Process SMTP flow."""
        for conn, data in self.smtp_flow.items():
            # Detect a new SMTP flow.
            if data.startswith((b"EHLO", b"HELO")):
                self.smtp_requests.append({"dst": conn, "raw": convert_to_printable(data)})