Example #1
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files, meta = [], {}

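        # Parse the per-file metadata log (one JSON object per line) into a map
        # keyed by each dropped file's on-disk path under the analysis directory.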
        if os.path.exists(self.dropped_meta_path):
            for line in open(self.dropped_meta_path, "rb"):
                entry = json.loads(line)
                filepath = os.path.join(self.analysis_path, entry["path"])
                meta[filepath] = {
                    "pids": entry["pids"],
                    "filepath": entry["filepath"],
                }

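        # Walk the dropped-files directory and merge any recorded metadata
        # (PIDs, original guest path) into each file's information.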
        for dir_name, dir_names, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info = File(file_path=file_path).get_all()
                file_info.update(meta.get(file_info["path"], {}))
                dropped_files.append(file_info)

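        # Files collected from the package_files directory are reported as well,
        # but carry no guest metadata.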
        for dir_name, dir_names, file_names in os.walk(self.package_files):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info = File(file_path=file_path).get_all()
                dropped_files.append(file_info)

        return dropped_files
Example #2
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files, meta = [], {}
        buf = self.options.get("buffer", 8192)

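        # Nothing is dropped for pcap/static tasks, and there is nothing to do
        # if the dropped-files directory was never created.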
        if self.task["category"] in ("pcap", "static"):
            return dropped_files

        if not os.path.exists(self.dropped_path):
            return dropped_files

        if os.path.exists(self.files_metadata):
            for line in open(self.files_metadata, "rb"):
                entry = json.loads(line)
                filepath = os.path.join(self.analysis_path, entry["path"])
                meta[filepath] = {
                    "pids": entry["pids"],
                    "filepath": entry["filepath"],
                }

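        # Walk the dropped-files directory: register any parsed PE object under
        # results["pefiles"], derive the guest-side file name from the recorded
        # path, and keep a printable preview of at most `buf` bytes of content.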
        for dir_name, dir_names, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"],
                                                       pefile_object)
                file_info.update(meta.get(file_info["path"], {}))
                guest_path = file_info["filepath"]
                guest_name = guest_path.split("\\")[-1]
                file_info["guest_paths"] = [guest_path]
                file_info["name"] = guest_name
                try:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(buf + 1)
                    if len(filedata) > buf:
                        file_info["data"] = convert_to_printable(
                            filedata[:buf] + " <truncated>")
                    else:
                        file_info["data"] = convert_to_printable(filedata)
                except UnicodeDecodeError:
                    pass
                dropped_files.append(file_info)

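        # Files collected from the package_files directory are added without
        # guest metadata or a content preview.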
        for dir_name, dir_names, file_names in os.walk(self.package_files):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"],
                                                       pefile_object)
                dropped_files.append(file_info)

        return dropped_files
Example #3
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files, meta = [], {}
        buf = self.options.get("buffer", 8192)

        if self.task["category"] in ("pcap", "static"):
            return dropped_files

        if not os.path.exists(self.dropped_path):
            return dropped_files

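        # The metadata log can contain several entries for the same on-disk path,
        # so keep them all in a list per path.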
        if os.path.exists(self.files_metadata):
            for line in open(self.files_metadata, "rb"):
                entry = json.loads(line)
                filepath = os.path.join(self.analysis_path, entry["path"])
                meta.setdefault(filepath, []).append({
                    "pids": entry["pids"],
                    "filepath": entry["filepath"],
                })

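        # Attach deduplicated guest-side paths and file names to each dropped
        # file before reading its content preview.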
        for dir_name, _, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"],
                                                       pefile_object)
                file_info.update(meta.get(file_info["path"][0], {}))
                if file_path in meta:
                    guest_paths = list(
                        set([path.get("filepath")
                             for path in meta[file_path]]))
                    guest_names = list(
                        set([
                            path.get("filepath", "").rsplit("\\", 1)[-1]
                            for path in meta[file_path]
                        ]))
                else:
                    guest_paths = []
                    guest_names = []

                file_info["guest_paths"] = guest_paths if isinstance(
                    guest_paths, list) else [guest_paths]
                file_info["name"] = guest_names

                try:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(buf + 1)

                    filedata = wide2str(filedata)
                    if len(filedata) > buf:
                        file_info["data"] = convert_to_printable(
                            f"{filedata[:buf]} <truncated>")
                    else:
                        file_info["data"] = convert_to_printable(filedata)
                except UnicodeDecodeError:
                    pass
                dropped_files.append(file_info)

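        # Files collected from the package_files directory are also run through
        # the generic file extractors/unpackers before being reported.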
        for dir_name, _, file_names in os.walk(self.package_files):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"],
                                                       pefile_object)

                # Run the generic file extractors/unpackers on the collected file.
                generic_file_extractors(file_path, self.dropped_path,
                                        file_info.get("type", ""), file_info)

                dropped_files.append(file_info)

        return dropped_files
Example #4
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files, meta = [], {}
        buf = self.options.get("buffer", 8192)

        if self.task["category"] in ("pcap", "static"):
            return dropped_files

        if not os.path.exists(self.dropped_path):
            return dropped_files

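        # Read the metadata log line by line; each on-disk path may map to
        # several {pids, filepath} entries.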
        if os.path.exists(self.files_metadata):
            with open(self.files_metadata, "rb") as f:
                for line in f:
                    entry = json.loads(line)
                    filepath = os.path.join(self.analysis_path, entry["path"])
                    meta.setdefault(filepath, []).append(
                        {
                            "pids": entry["pids"],
                            "filepath": entry["filepath"],
                        }
                    )

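        # Each dropped file is additionally passed through static_file_info(),
        # and its guest paths/names are deduplicated with set comprehensions.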
        for dir_name, _, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"], pefile_object)
                # ToDo should we pass PE object?
                static_file_info(
                    file_info,
                    file_path,
                    str(self.task["id"]),
                    self.task.get("package", ""),
                    self.task.get("options", ""),
                    self.dropped_path,
                    self.results,
                )

                file_info.update(meta.get(file_info["path"][0], {}))
                file_info["guest_paths"] = list({path.get("filepath") for path in meta.get(file_path, [])})
                file_info["name"] = list({path.get("filepath", "").rsplit("\\", 1)[-1] for path in meta.get(file_path, [])})

                try:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(buf + 1)

                    filedata = wide2str(filedata)
                    file_info["data"] = convert_to_printable_and_truncate(filedata, buf)
                except UnicodeDecodeError:
                    pass
                dropped_files.append(file_info)

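        # Package files get the same PE registration and static_file_info()
        # handling, but no guest metadata or content preview.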
        for dir_name, _, file_names in os.walk(self.package_files):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info, pefile_object = File(file_path=file_path).get_all()
                if pefile_object:
                    self.results.setdefault("pefiles", {})
                    self.results["pefiles"].setdefault(file_info["sha256"], pefile_object)

                static_file_info(
                    file_info,
                    file_path,
                    self.task["id"],
                    self.task.get("package", ""),
                    self.task.get("options", ""),
                    self.self_extracted,
                )

                dropped_files.append(file_info)

        return dropped_files
Example #5
    def run(self):
        """Run analysis.
        @return: list of dropped files with related information.
        """
        self.key = "dropped"
        dropped_files, meta = [], {}
        buf = self.options.get("buffer", 8192)

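        # Same flow as Example #1, with early exits for pcap/static tasks and a
        # missing dropped-files directory.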
        if self.task["category"] in ("pcap", "static"):
            return dropped_files

        if not os.path.exists(self.dropped_path):
            return dropped_files

        if os.path.exists(self.files_metadata):
            for line in open(self.files_metadata, "rb"):
                entry = json.loads(line)
                filepath = os.path.join(self.analysis_path, entry["path"])
                meta[filepath] = {
                    "pids": entry["pids"],
                    "filepath": entry["filepath"],
                }

        for dir_name, dir_names, file_names in os.walk(self.dropped_path):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info = File(file_path=file_path).get_all()
                file_info.update(meta.get(file_info["path"], {}))
                dropped_files.append(file_info)

        for dir_name, dir_names, file_names in os.walk(self.package_files):
            for file_name in file_names:
                file_path = os.path.join(dir_name, file_name)
                file_info = File(file_path=file_path).get_all()
                dropped_files.append(file_info)

        return dropped_files

        # ToDo adapt
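        # NOTE: the block below is unreachable (the method has already returned);
        # it appears to be older logic for attaching a text preview to non-binary
        # dropped files.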
        textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})
        is_binary_file = lambda bytes: bool(bytes.translate(None, textchars))
        file_names = os.listdir(self.dropped_path)
        for file_name in file_names:
            file_path = os.path.join(self.dropped_path, file_name)
            if not os.path.isfile(file_path):
                continue
            if file_name.endswith("_info.txt"):
                continue
            guest_paths = [line.strip() for line in open(file_path + "_info.txt")]
            guest_name = guest_paths[0].split("\\")[-1]
            file_info = File(file_path=file_path, guest_paths=guest_paths, file_name=guest_name).get_all()
            texttypes = [
                "ASCII",
                "Windows Registry text",
                "XML document text",
                "Unicode text",
            ]
            readit = False
            for texttype in texttypes:
                if texttype in file_info["type"]:
                    readit = True
                    break

            if is_binary_file(open(file_info["path"], 'rb').read(8192)):
                pass
            else:
                if readit:
                    with open(file_info["path"], "r") as drop_open:
                        filedata = drop_open.read(buf + 1)
                    if len(filedata) > buf:
                        file_info["data"] = convert_to_printable(filedata[:buf] + " <truncated>")
                    else:
                        file_info["data"] = convert_to_printable(filedata)

            dropped_files.append(file_info)

        return dropped_files