Code Example #1
File: analyzer.py  Project: Augus-Wang/cuckoo
    def dump_file(self, filepath):
        """Dump a file to the host."""
        if not os.path.isfile(filepath):
            log.warning("File at path \"%r\" does not exist, skip.", filepath)
            return False

        # Check whether we've already dumped this file - in that case skip it.
        try:
            sha256 = hash_file(hashlib.sha256, filepath)
            if sha256 in self.dumped:
                return
        except IOError as e:
            log.info("Error dumping file from path \"%s\": %s", filepath, e)
            return

        filename = "%s_%s" % (sha256[:16], os.path.basename(filepath))
        upload_path = os.path.join("files", filename)

        try:
            upload_to_host(
                # If available use the original filepath, the one that is
                # not lowercased.
                self.files_orig.get(filepath.lower(), filepath),
                upload_path, self.files.get(filepath.lower(), [])
            )
            self.dumped.append(sha256)
        except (IOError, socket.error) as e:
            log.error(
                "Unable to upload dropped file at path \"%s\": %s",
                filepath, e
            )
Code Example #2
File: test_netlog.py  Project: LetMeR00t/cuckoo
def test_upload_to_host():
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind(("127.0.0.1", 0))
    s.listen(5)

    with open("analysis.conf", "wb") as f:
        f.write("[hello]\nip = %s\nport = %d" % s.getsockname())

    handlers = logging.getLogger().handlers[:]
    init_logging()

    # Test file not found exception.
    upload_to_host(u"\u202ethisis404.exe", "1.exe")
    c, _ = s.accept()
    assert "Exception uploading file u'\\u202e" in c.recv(0x1000)
    c, _ = s.accept()
    assert "FILE 2\n1.exe\n\xe2\x80\xaethisis404.exe\n" in c.recv(0x1000)

    # Test correct upload.
    upload_to_host(__file__, "1.py", ["1", "2", "3"])
    c, _ = s.accept()
    assert c.recv(0x1000).startswith(
        "FILE 2\n1.py\n%s\n1 2 3\n# Copyright (C" % __file__
    )

    logging.getLogger().handlers = handlers
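The test above also documents the transfer protocol that upload_to_host is expected to speak: the result-server address is read from analysis.conf, and each upload starts with a header of the form "FILE 2\n<dump path>\n<original path>\n<pids>\n" followed by the raw file contents. Below is a minimal sender sketch written only against those observations; the function name upload_file_sketch, the config handling, and the treatment of an empty pid list are illustrative assumptions, not the actual Cuckoo implementation.

import socket
from configparser import ConfigParser

def upload_file_sketch(local_path, dump_path, pids=()):
    # Hypothetical sender mirroring what test_upload_to_host asserts:
    # a "FILE 2" header (dump path, original path, monitored pids),
    # then the raw file contents, sent to the result server whose
    # address is stored in analysis.conf.
    cfg = ConfigParser()
    cfg.read("analysis.conf")
    section = cfg.sections()[0]  # the test fixture writes a single "[hello]" section
    ip = cfg.get(section, "ip")
    port = cfg.getint(section, "port")

    header = "FILE 2\n%s\n%s\n%s\n" % (dump_path, local_path, " ".join(pids))
    with socket.create_connection((ip, port)) as s:
        s.sendall(header.encode("utf-8"))
        with open(local_path, "rb") as f:
            for chunk in iter(lambda: f.read(0x1000), b""):
                s.sendall(chunk)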
Code Example #3
File: analyzer.py  Project: NextSecurity/cuckoo
def dump_file(file_path):
    """Create a copy of the give file path."""
    try:
        if os.path.exists(file_path):
            sha256 = hashlib.sha256(open(file_path, "rb").read()).hexdigest()
            if sha256 in DUMPED_LIST:
                # The file was already dumped, just skip.
                return
        else:
            log.warning('File at path "%s" does not exist, skip', file_path)
            return
    except IOError as e:
        log.warning('Unable to access file at path "%s": %s', file_path, e)
        return

    # 32k is the maximum length for a filename
    path = create_unicode_buffer(32 * 1024)
    name = c_wchar_p()
    KERNEL32.GetFullPathNameW(file_path, 32 * 1024, path, byref(name))
    file_path = path.value

    # Check if the path has a valid file name, otherwise it's a directory
    # and we should abort the dump.
    if name.value:
        # Should be able to extract Alternate Data Streams names too.
        file_name = name.value[name.value.find(":") + 1 :]
    else:
        return

    upload_path = os.path.join("files", str(random.randint(100000000, 9999999999)), file_name)
    try:
        upload_to_host(file_path, upload_path)
        DUMPED_LIST.append(sha256)
    except (IOError, socket.error) as e:
        log.error('Unable to upload dropped file at path "%s": %s', file_path, e)
Code Example #4
File: analyzer.py  Project: marek2003/cuckoo-modified
def dump_file(file_path):
    """Create a copy of the given file path."""
    duplicate = False
    try:
        if os.path.exists(file_path):
            sha256 = hash_file(hashlib.sha256, file_path)
            if sha256 in DUMPED_LIST:
                # The file was already dumped, just upload the alternate name for it.
                duplicate = True
        else:
            log.warning("File at path \"%s\" does not exist, skip.",
                        file_path.encode("utf-8", "replace"))
            return
    except IOError as e:
        log.warning("Unable to access file at path \"%s\": %s", file_path.encode("utf-8", "replace"), e)
        return

    if os.path.isdir(file_path):
        return
    file_name = os.path.basename(file_path)
    if duplicate:
        idx = DUMPED_LIST.index(sha256)
        upload_path = UPLOADPATH_LIST[idx]
    else:
        upload_path = os.path.join("files",
                               str(random.randint(100000000, 9999999999)),
                               file_name.encode("utf-8", "replace"))
    try:
        upload_to_host(file_path, upload_path, duplicate)
        if not duplicate:
            DUMPED_LIST.append(sha256)
            UPLOADPATH_LIST.append(upload_path)
    except (IOError, socket.error) as e:
        log.error("Unable to upload dropped file at path \"%s\": %s",
                  file_path.encode("utf-8", "replace"), e)
Code Example #5
File: process.py  Project: MAECProject/cuckoo
    def dump_memory(self, addr=None, length=None):
        """Dump process memory, optionally target only a certain memory range.
        @return: operation status.
        """
        if not self.pid:
            log.warning("No valid pid specified, memory dump aborted")
            return False

        if not self.is_alive():
            log.warning("The process with pid %d is not alive, memory "
                        "dump aborted", self.pid)
            return False

        if self.is32bit(pid=self.pid):
            inject_exe = os.path.join("bin", "inject-x86.exe")
        else:
            inject_exe = os.path.join("bin", "inject-x64.exe")

        # Take the memory dump.
        dump_path = tempfile.mktemp()

        try:
            args = [
                inject_exe,
                "--pid", "%s" % self.pid,
                "--dump", dump_path,
            ]

            # Restrict to a certain memory block.
            if addr and length:
                args += [
                    "--dump-block",
                    "0x%x" % addr,
                    "%s" % length,
                ]

            subprocess_checkcall(args)
        except subprocess.CalledProcessError:
            log.error("Failed to dump memory of %d-bit process with pid %d.",
                      32 if self.is32bit(pid=self.pid) else 64, self.pid)
            return

        # Calculate the next index and send the process memory dump over to
        # the host. Keep in mind that one process may have multiple process
        # memory dumps in the future.
        idx = self.dumpmem[self.pid] = self.dumpmem.get(self.pid, 0) + 1

        if addr and length:
            file_name = os.path.join(
                "memory", "block-%s-0x%x-%s.dmp" % (self.pid, addr, idx)
            )
        else:
            file_name = os.path.join("memory", "%s-%s.dmp" % (self.pid, idx))

        upload_to_host(dump_path, file_name)
        os.unlink(dump_path)

        log.info("Memory dump of process with pid %d completed", self.pid)
        return True
Code Example #6
File: process.py  Project: Rafiot/CAPEv2
    def dump_memory(self):
        """Dump process memory.
        @return: operation status.
        """
        if not self.pid:
            log.warning("No valid pid specified, memory dump aborted")
            return False

        if not self.is_alive():
            log.warning(
                "The process with pid %d is not alive, memory "
                "dump aborted", self.pid)
            return False

        bin_name = ""
        bit_str = ""
        file_path = os.path.join(PATHS["memory"], str(self.pid) + ".dmp")
        if self.is_64bit():
            orig_bin_name = LOADER64_NAME
            bit_str = "64-bit"
        else:
            orig_bin_name = LOADER32_NAME
            bit_str = "32-bit"

        bin_name = os.path.join(os.getcwd(), orig_bin_name)

        if os.path.exists(bin_name):
            ret = subprocess.call([bin_name, "dump", str(self.pid), file_path])
            if ret == 1:
                log.info("Dumped %s process with pid %d", bit_str, self.pid)
            else:
                log.error("Unable to dump %s process with pid %d, error: %d",
                          bit_str, self.pid, ret)
                return False
        else:
            log.error(
                "Please place the %s binary from cuckoomon into analyzer/windows/bin in order to analyze %s binaries.",
                os.path.basename(bin_name), bit_str)
            return False

        try:
            file_path = os.path.join(PATHS["memory"], str(self.pid) + ".dmp")
            upload_to_host(file_path,
                           os.path.join("memory",
                                        str(self.pid) + ".dmp"))
        except Exception as e:
            print(e)
            log.error(e, exc_info=True)
            log.error("Unable to upload memory dump %s to %s", file_path,
                      os.path.join("memory", "{0}.dmp".format(self.pid)))

        log.info("Memory dump of process with pid %d completed", self.pid)

        return True
Code Example #7
    def collectLogs(self):
        try:
            subprocess.call(["C:\\Windows\\System32\\wevtutil.exe", "query-events", "microsoft-windows-powershell/operational", "/rd:true", "/e:root", "/format:xml", "/uni:true"], startupinfo=self.startupinfo,  stdout=open("C:\\curtain.log", "w"))
        except Exception as e:
            log.error("Curtain - Error collecting PowerShell events - %s " % e)

        time.sleep(5)

        if os.path.exists("C:\\curtain.log"):
            upload_to_host("C:\\curtain.log", "curtain/%s.curtain.log" % time.time(), False)
        else:
            log.error("Curtain log file not found!")
Code Example #8
File: stap.py  Project: kevoreilly/CAPEv2
    def stop(self):
        if not self.enabled:
            return False

        try:
            r = self.proc.poll()
            log.debug("stap subprocess retval %d", r)
            self.proc.kill()
        except Exception as e:
            log.warning("Exception killing stap: %s", e)

        upload_to_host("stap.log", "stap/stap.log", True)
Code Example #9
 def upload_artefacts(self, package):
     try:
         # Upload files the package created to package_files in the
         # results folder to host
         package_files = package.package_files()
         if package_files is not None:
             for p in package_files:
                 upload_to_host(
                     p[0], os.path.join("package_files", p[1]))
     except Exception as e:
         log.warning("The package \"%s\" package_files function raised an "
                     "exception: %s", package, e)
Code Example #10
File: sysmon.py  Project: 5l1v3r1/CAPE-1
    def collect_logs(self):
        try:
            subprocess.call(["C:\\Windows\\System32\\wevtutil.exe", "query-events", "microsoft-windows-sysmon/operational", "/rd:true", "/e:root", "/format:xml", "/uni:true"], startupinfo=self.startupinfo, stdout=open("C:\\sysmon.xml", "w"))
        except Exception as e:
            log.error("Could not create sysmon log file - %s" % e)

        # Give it some time to create the file
        time.sleep(5)

        if os.path.exists("C:\\sysmon.xml"):
            upload_to_host("C:\\sysmon.xml", "sysmon/%s.sysmon.xml" % time.time())
        else:
            log.error("Sysmon log file not found in guest machine")
Code Example #11
    def finish(self):
        Package.finish(self)

        for i in range(0, 60):
            converted_file_path = self.find_converted_file()
            if converted_file_path:
                upload_path = os.path.join('converted', 'result.pdf')
                upload_to_host(converted_file_path, upload_path)
                break
            else:
                time.sleep(1)

        return True
Code Example #12
File: sysmon.py  Project: haam3r/cuckoo
    def finish(self):
        # Query events and dump to file
        try:
            os.system("C:\\Windows\\System32\\wevtutil.exe query-events microsoft-windows-sysmon/operational /rd:false /e:root /format:xml /uni:true > C:\\sysmon.log")
        except Exception as e:
            log.error("Sysmon - Could not export sysmon logs")

        if os.path.exists("C:\\sysmon.log"):
            upload_to_host("C:\\sysmon.log", "sysmon/sysmon.xml")
        else:
            log.error("Sysmon log file not found")

        return True
Code Example #13
    def collectLogs(self):

        try:
            os.system("C:\\Windows\\System32\\wevtutil.exe query-events microsoft-windows-powershell/operational /rd:true /e:root /format:xml /uni:true > C:\\curtain.log")
        except Exception as e:
            log.error("Curtain - Error collecting PowerShell events - %s " % e)

        time.sleep(5)

        if os.path.exists("C:\\curtain.log"):
            upload_to_host("C:\\curtain.log", "curtain/%s.curtain.log" % time.time())
        else:
            log.error("Curtain log file not found!")
Code Example #14
 def stop(self):
     """Stop snapshotting."""
     try:
         with open(self.filepath, 'w') as f:
             json.dump(self.metrics,
                       f,
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': '))
         upload_to_host(self.filepath, 'files/metrics.json')
         log.info("Successfully uploaded metrics to host.")
     except BaseException as e:
         log.info("An error occured when logging results : " + e.str())
         self.do_run = False
Code Example #15
File: sysmon.py  Project: rohitereddy/CAPE
    def collect_logs(self):
        try:
            os.system("C:\\Windows\\System32\\wevtutil.exe query-events "\
                      "microsoft-windows-sysmon/operational /format:xml /e:Events > C:\\sysmon.xml")
        except Exception as e:
            log.error("Could not create sysmon log file - %s" % e)

        # Give it some time to create the file
        time.sleep(5)

        if os.path.exists("C:\\sysmon.xml"):
            upload_to_host("C:\\sysmon.xml",
                           "sysmon/%s.sysmon.xml" % time.time())
        else:
            log.error("Sysmon log file not found in guest machine")
Code Example #16
def getFile(filename):
    global new_file_name
    path = ""
    if 'ProcessInterceptor' in filename or 'LWR' in filename:
        root = os.path.join("C:", os.sep, "dlls")
        path = os.path.join(root, filename)
    else:
        root = os.environ["TEMP"]
        path = os.path.join(root, filename)
    if os.path.exists(path):
        upload_to_host(path, os.path.join("files", "processing_files",
                                          filename))
        #upload_to_host(path,os.path.join("files","processing_files",filename),False)
    else:
        log.info("File %s not found" % filename)
        log.info("Path: %s" % path)
Code Example #17
File: analyzer.py  Project: hatching/cuckoo-ekhunting
    def stop_package(self, pkg_id, procmemdump=False):
        """Stop the package matching the given package id. Process memory
        dumps are created if

        @param pkg_id: a string identifier to specify a running analysis
        package
        """
        pkg_id = str(pkg_id)
        pkg = self.packages.get(pkg_id)
        if not pkg:
            raise CuckooPackageError(
                "Cannot stop package. Package with id '%r' does not "
                "exist" % pkg_id
            )

        if procmemdump or pkg.options.get("procmemdump"):
            try:
                pkg.dump_procmem()
            except Exception as e:
                log.exception(
                    "Error while creating process memory dumps for "
                    "package '%s'. %s", pkg.__class__.__name__, e
                )

        try:
            pkg.finish()
        except Exception as e:
            log.exception(
                "Error during analysis package '%s' finishing. %s",
                pkg.__class__.__name__, e
            )

        try:
            # Upload files the package created to package_files in the
            # results folder.
            for path, name in pkg.package_files() or []:
                upload_to_host(path, os.path.join("package_files", name))
        except Exception as e:
            log.warning(
                "The package '%s' package_files function raised an "
                "exception: %s", pkg.__class__.__name__, e
            )

        pkg.stop()
        self.packages.pop(pkg_id)
        return {"pkg_id": pkg_id}
Code Example #18
    def dump_memory(self):
        """Dump process memory.
        @return: operation status.
        """
        if not self.pid:
            log.warning("No valid pid specified, memory dump aborted")
            return False

        if not self.is_alive():
            log.warning(
                "The process with pid %d is not alive, memory "
                "dump aborted", self.pid)
            return False

        if self.is32bit(pid=self.pid):
            inject_exe = os.path.join("bin", "inject-x86.exe")
        else:
            inject_exe = os.path.join("bin", "inject-x64.exe")

        # Take the memory dump.
        dump_path = tempfile.mktemp()

        try:
            args = [
                inject_exe,
                "--pid",
                "%s" % self.pid,
                "--dump",
                dump_path,
            ]
            subprocess_checkcall(args)
        except subprocess.CalledProcessError:
            log.error("Failed to dump memory of %d-bit process with pid %d.",
                      32 if self.is32bit(pid=self.pid) else 64, self.pid)
            return

        # Calculate the next index and send the process memory dump over to
        # the host. Keep in mind that one process may have multiple process
        # memory dumps in the future.
        idx = self.dumpmem[self.pid] = self.dumpmem.get(self.pid, 0) + 1
        file_name = os.path.join("memory", "%s-%s.dmp" % (self.pid, idx))
        upload_to_host(dump_path, file_name)
        os.unlink(dump_path)

        log.info("Memory dump of process with pid %d completed", self.pid)
        return True
Code Example #19
File: process.py  Project: onesorzer0es/CAPEv2
    def upload_memdump(self):
        """Upload process memory dump.
        @return: operation status.
        """
        if not self.pid:
            log.warning("No valid pid specified, memory dump cannot be uploaded")
            return False

        file_path = os.path.join(PATHS["memory"], f"{self.pid}.dmp")
        try:
            upload_to_host(file_path, os.path.join("memory", f"{self.pid}.dmp"), category="memory")
        except Exception as e:
            log.error(e, exc_info=True)
            log.error(os.path.join("memory", f"{self.pid}.dmp"))
            log.error(file_path)
        log.info("Memory dump of process %d uploaded", self.pid)

        return True
Code Example #20
File: procmon.py  Project: 0day29/cuckoo
    def stop(self):
        # Terminate process monitor.
        subprocess.check_call([
            self.procmon_exe,
            "/Terminate",
        ])

        # Convert the process monitor log into a readable XML file.
        subprocess.check_call([
            self.procmon_exe,
            "/OpenLog", self.procmon_pml,
            "/LoadConfig", self.procmon_pmc,
            "/SaveAs", self.procmon_xml,
            "/SaveApplyFilter",
        ])

        # Upload the XML file to the host.
        upload_to_host(self.procmon_xml, os.path.join("logs", "procmon.xml"))
Code Example #21
File: procmon.py  Project: githubassets/CAPEv2
    def stop(self):
        # Terminate process monitor.
        subprocess.check_call([
            self.procmon_exe,
            "/Terminate",
        ])

        # Convert the process monitor log into a readable XML file.
        subprocess.check_call([
            self.procmon_exe,
            "/OpenLog", self.procmon_pml,
            "/LoadConfig", self.procmon_pmc,
            "/SaveAs", self.procmon_xml,
            "/SaveApplyFilter",
        ])

        # Upload the XML file to the host.
        upload_to_host("procmon.xml", self.procmon_xml)
Code Example #22
File: analyzer.py  Project: frank2411/cuckoo_dev
    def _upload_file(self, filepath):
        if not os.path.isfile(filepath):
            return
        # Check whether we've already dumped this file - in that case skip it
        try:
            hashsum = hash_file(hashlib.sha256, filepath)
            if hashsum in self.uploaded_hashes:
                return
        except IOError as e:
            self.log.info("Error dumping file from path \"%s\": %s", filepath, e)
            return
        filename = "%s_%s" % (hashsum[:16], os.path.basename(filepath))
        upload_path = os.path.join("files", filename)

        try:
            upload_to_host(filepath, upload_path)
            self.uploaded_hashes.append(hashsum)
        except IOError as e:
            self.log.error("Unable to upload dropped file at path \"%s\": %s", filepath, e)
Code Example #23
File: analyzer.py  Project: muhzii/cuckoo
    def _dump_file(self, filepath):
        """Dump a file to the host."""
        if not os.path.isfile(filepath):
            log.warning("File at path %s does not exist, skip.", filepath)
            return

        sha256 = None
        try:
            sha256 = hash_file(hashlib.sha256, filepath)
        except IOError as e:
            log.error("Error calculating hash of file '%s': %s", filepath, e)

        # Check if we already dumped the file.
        if sha256 in self.dumped:
            return

        upload_to_host(filepath, "files/" + os.path.basename(filepath))
        if sha256:
            self.dumped.append(sha256)
Code Example #24
    def dump_file(self, filepath):
        """Dump a file to the host."""
        if not os.path.isfile(filepath):
            log.warning("File at path \"%r\" does not exist, skip.", filepath)
            return False

        # Check if this is not a binary file; we only transfer binary files.
        if not self.is_binary_file(filepath):
            log.warning("File at path \"%r\" is not a binary file, not transferring.", filepath)
            return

        # Don't dump more than 10 files.
        if Files.dumpedFiles > 10:
            return

        # Check whether we've already dumped this file - in that case skip it.
        try:
            sha256 = hash_file(hashlib.sha256, filepath)
            if sha256 in self.dumped:
                return
        except IOError as e:
            log.info("Error dumping file from path \"%s\": %s", filepath, e)
            return

        filename = "%s_%s" % (sha256[:16], os.path.basename(filepath))
        upload_path = os.path.join("files", filename)

        # Increment the dumpedFiles counter.
        Files.dumpedFiles += 1

        try:
            upload_to_host(
                # If available use the original filepath, the one that is
                # not lowercased.
                self.files_orig.get(filepath.lower(), filepath),
                upload_path, self.files.get(filepath.lower(), [])
            )
            self.dumped.append(sha256)
        except (IOError, socket.error) as e:
            log.error(
                "Unable to upload dropped file at path \"%s\": %s",
                filepath, e
            )
Code Example #25
File: analyzer.py  Project: 0day29/cuckoo
    def _upload_file(self, filepath):
        if not path.isfile(filepath):
            return
        # Check whether we've already dumped this file - in that case skip it
        try:
            hashsum = hash_file(sha256, filepath)
            if hashsum in self.uploaded_hashes:
                return
        except IOError as e:
            self.log.info("Error dumping file from path \"%s\": %s", filepath, e)
            return
        filename = "%s_%s" % (hashsum[:16], path.basename(filepath))
        upload_path = path.join("files", filename)

        try:
            upload_to_host(filepath, upload_path)
            self.uploaded_hashes.append(hashsum)
        except IOError as e:
            self.log.error("Unable to upload dropped file at path \"%s\": %s", filepath, e)
Code Example #26
        def _method_name(self, event):
            try:
                # log.info("Got file %s %s", event.pathname, method)

                if not self.do_collect:
                    # log.info("Not currently set to collect %s", event.pathname)
                    return

                if event.pathname.startswith("/tmp/#"):
                    # log.info("skipping wierd file %s", event.pathname)
                    return

                if not os.path.isfile(event.pathname):
                    # log.info("Path is a directory or does not exist, ignoring: %s", event.pathname)
                    return

                if os.path.basename(event.pathname) == "stap.log":
                    return

                for x in range(0, 1):
                    try:
                        # log.info("trying to collect file %s", event.pathname)
                        sha256 = hash_file(hashlib.sha256, event.pathname)
                        filename = "%s_%s" % (sha256[:16],
                                              os.path.basename(event.pathname))
                        if filename in self.uploadedHashes:
                            # log.info("already collected file %s", event.pathname)
                            return
                        upload_path = os.path.join("files", filename)
                        upload_to_host(event.pathname, upload_path)
                        self.uploadedHashes.append(filename)
                        return
                    except Exception as e:
                        log.info('Error dumping file from path "%s": %s',
                                 event.pathname, e)

                    # log.info("retrying %s", event.pathname)
                    time.sleep(1)

            except Exception as e:
                log.error("Exception processing event %s", e)
Code Example #27
def dump_file(file_path):
    """Create a copy of the given file path and send it to the host."""
    try:
        if os.path.exists(file_path):
            sha256 = hash_file(hashlib.sha256, file_path)
            if sha256 in DUMPED_LIST:
                # The file was already dumped
                # Cuckoo normally just skips the file, I have chosen not to
                #return
                log.warning(
                    "File at path \"%s\" has a hash that is a duplicate of another dumped file.",
                    file_path)
        else:
            log.warning("File at path \"%s\" does not exist, skip.", file_path)
            return
    except IOError as e:
        log.warning("Unable to access file at path \"%s\": %s", file_path, e)
        return

    log.info("File path is %s and file size is %d.", file_path,
             os.stat(file_path).st_size)

    #choose the correct folder
    if "logs" in file_path:
        upload_path = os.path.join("logs", os.path.basename(file_path))
    elif "drop" in file_path:
        upload_path = os.path.join("files", os.path.basename(file_path))
    else:
        upload_path = os.path.join("files",
                                   str(random.randint(100000000, 9999999999)),
                                   os.path.basename(file_path))
    log.info("Upload path is %s.", upload_path)

    #send file to host
    try:
        upload_to_host(file_path, upload_path)
        DUMPED_LIST.append(sha256)
    except (IOError, socket.error) as e:
        log.error("Unable to upload dropped file at path \"%s\": %s",
                  file_path, e)
Code Example #28
 def stop(self):
     os.system("/sbin/rmmod ktrace")
     log.info("ktrace stop() %d" % os.getpid())
     os.system("/bin/dmesg > /var/log/ktrace.log")
     upload_to_host("/var/log/ktrace.log", "logs/ktrace.log")
     upload_to_host("/proc/version", "files/version")
     upload_to_host("/proc/modules", "files/modules")
Code Example #29
def dump_file(file_path):
    """Create a copy of the give file path."""
    try:
        if os.path.exists(file_path):
            sha256 = hash_file(hashlib.sha256, file_path)
            if sha256 in DUMPED_LIST:
                # The file was already dumped, just skip.
                return
        else:
            log.warning("File at path \"%s\" does not exist, skip.",
                        file_path)
            return
    except IOError as e:
        log.warning("Unable to access file at path \"%s\": %s", file_path, e)
        return

    # 32k is the maximum length for a filename
    path = create_unicode_buffer(32 * 1024)
    name = c_wchar_p()
    KERNEL32.GetFullPathNameW(unicode(file_path), 32 * 1024, path, byref(name))
    file_path = path.value

    # Check if the path has a valid file name, otherwise it's a directory
    # and we should abort the dump.
    if name.value:
        # Should be able to extract Alternate Data Streams names too.
        file_name = name.value[name.value.find(":")+1:]
    else:
        return

    upload_path = os.path.join("files",
                               str(random.randint(100000000, 9999999999)),
                               file_name)
    try:
        upload_to_host(file_path, upload_path)
        DUMPED_LIST.append(sha256)
    except (IOError, socket.error) as e:
        log.error("Unable to upload dropped file at path \"%s\": %s",
                  file_path, e)
Code Example #30
File: analyzer.py  Project: ksmaheshkumar/cuckoo
    def dump_file(self, filepath):
        """Dump a file to the host."""
        if not os.path.isfile(filepath):
            log.warning('File at path "%s" does not exist, skip.', filepath)
            return False

        # Check whether we've already dumped this file - in that case skip it.
        try:
            sha256 = hash_file(hashlib.sha256, filepath)
            if sha256 in self.dumped:
                return
        except IOError as e:
            log.info('Error dumping file from path "%s": %s', filepath, e)
            return

        filename = "%s_%s" % (sha256[:16], os.path.basename(filepath))
        upload_path = os.path.join("files", filename)

        try:
            upload_to_host(filepath, upload_path)
            self.dumped.append(sha256)
        except (IOError, socket.error) as e:
            log.error('Unable to upload dropped file at path "%s": %s', filepath, e)
Code Example #31
def dump_file(file_path):
    """Create a copy of the given file path."""
    duplicate = False
    try:
        if os.path.exists(file_path):
            sha256 = hash_file(hashlib.sha256, file_path)
            if sha256 in DUMPED_LIST:
                # The file was already dumped, just upload the alternate name for it.
                duplicate = True
        else:
            log.warning("File at path \"%s\" does not exist, skip.",
                        file_path.encode("utf-8", "replace"))
            return
    except IOError as e:
        log.warning("Unable to access file at path \"%s\": %s",
                    file_path.encode("utf-8", "replace"), e)
        return

    if os.path.isdir(file_path):
        return
    name = os.path.basename(file_path)
    file_name = name[name.find(u":") + 1:]
    if duplicate:
        idx = DUMPED_LIST.index(sha256)
        upload_path = UPLOADPATH_LIST[idx]
    else:
        upload_path = os.path.join("files",
                                   str(random.randint(100000000, 9999999999)),
                                   file_name.encode("utf-8", "replace"))
    try:
        upload_to_host(file_path, upload_path, duplicate)
        if not duplicate:
            DUMPED_LIST.append(sha256)
            UPLOADPATH_LIST.append(upload_path)
    except (IOError, socket.error) as e:
        log.error("Unable to upload dropped file at path \"%s\": %s",
                  file_path.encode("utf-8", "replace"), e)
Code Example #32
    def dump_file(self, filepath):
        """Dump a file to the host."""
        if not os.path.isfile(filepath):
            log.warning("File at path %r does not exist, skip.", filepath)
            return False

        # Check whether we've already dumped this file - in that case skip it.
        try:
            sha256 = hash_file(hashlib.sha256, filepath)
            if sha256 in self.dumped:
                return
        except IOError as e:
            log.info("Error dumping file from path \"%s\": %s", filepath, e)
            return

        file_size = os.path.getsize(filepath)
        will_exceed_total = self.dumped_bytes + file_size > self.MAX_SIZE_TOTAL

        if file_size > self.MAX_SIZE_SINGLE or will_exceed_total:
            log.info("File from path \"%s\" exceeded size limits", filepath)
            return

        filename = "%s_%s" % (sha256[:16], os.path.basename(filepath))
        upload_path = os.path.join("files", filename)

        try:
            upload_to_host(
                # If available use the original filepath, the one that is
                # not lowercased.
                self.files_orig.get(filepath.lower(), filepath),
                upload_path,
                self.files.get(filepath.lower(), []))
            self.dumped.append(sha256)
            self.dumped_bytes += file_size
        except (IOError, socket.error) as e:
            log.error("Unable to upload dropped file at path \"%s\": %s",
                      filepath, e)
Code Example #33
File: analyzer.py  Project: kcberg/cuckoo
    def dump_file(self, filepath):
        """Dump a file to the host."""
        if not os.path.isfile(filepath):
            log.warning("File at path \"%r\" does not exist, skip.", filepath)
            return False

        # Check whether we've already dumped this file - in that case skip it.
        try:
            sha256 = hash_file(hashlib.sha256, filepath)
            if sha256 in self.dumped:
                return
        except IOError as e:
            log.info("Error dumping file from path \"%s\": %s", filepath, e)
            return

        filename = "%s_%s" % (sha256[:16], os.path.basename(filepath))
        upload_path = os.path.join("files", filename)

        try:
            upload_to_host(filepath, upload_path)
            self.dumped.append(sha256)
        except (IOError, socket.error) as e:
            log.error("Unable to upload dropped file at path \"%s\": %s",
                      filepath, e)
Code Example #34
File: analyzer.py  Project: onesorzer0es/CAPEv2
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()

        log.debug("Starting analyzer from: %s", os.getcwd())
        log.debug("Storing results at: %s", PATHS["root"])

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        """
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect it automagically")

            package = "generic" if self.config.category == "file" else "wget"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError(f"No valid package available for file type: {self.config.file_type}")

            log.info('Automatically selected analysis package "%s"', package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = f"modules.packages.{package}"

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], 0)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError('Unable to import package "{package_name}", does not exist')

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError(f"Unable to select package class (package={package_name}): {e}")
        """
        if self.config.package:
            suggestion = "ff" if self.config.package == "ie" else self.config.package
        elif self.config.category != "file":
            suggestion = "url"
        else:
            suggestion = None

        # Try to figure out what analysis package to use with this target
        kwargs = {"suggestion": suggestion}
        if self.config.category == "file":
            package_class = choose_package_class(self.config.file_type,
                                                 self.config.file_name,
                                                 **kwargs)
        else:
            package_class = choose_package_class(None, None, **kwargs)

        if not package_class:
            raise Exception("Could not find an appropriate analysis package")
        # Package initialization
        kwargs = {
            "options": self.config.options,
            "timeout": self.config.timeout
        }

        # Initialize the analysis package.
        # pack = package_class(self.config.get_options())
        pack = package_class(self.target, **kwargs)
        # Initialize Auxiliary modules
        Auxiliary()
        prefix = f"{auxiliary.__name__}."
        for loader, name, ispkg in pkgutil.iter_modules(
                auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], 0)
            except ImportError as e:
                log.warning('Unable to import the auxiliary module "%s": %s',
                            name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in sorted(Auxiliary.__subclasses__(),
                             key=lambda x: x.priority,
                             reverse=True):
            # Try to start the auxiliary module.
            try:
                aux = module()
                aux_avail.append(aux)
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented",
                            aux.__class__.__name__)
                continue
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s",
                            aux.__class__.__name__, e)
                continue
            finally:
                log.debug("Started auxiliary module %s",
                          aux.__class__.__name__)
                # aux_enabled.append(aux)
                if aux:
                    aux_enabled.append(aux)

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        try:
            # pids = pack.start(self.target)
            pids = pack.start()
        except NotImplementedError:
            raise CuckooError(
                f'The package "{package_class}" doesn\'t contain a run function'
            )
        except CuckooPackageError as e:
            raise CuckooError(
                f'The package "{package_class}" start function raised an error: {e}'
            )
        except Exception as e:
            raise CuckooError(
                f'The package "{package_class}" start function encountered an unhandled exception: {e}'
            )

        # If the analysis package returned a list of process IDs, we add them
        # to the list of monitored processes and enable the process monitor.
        if pids:
            add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info(
                "No process IDs returned by the package, running for the full timeout"
            )
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout")
            pid_check = False

        time_counter = 0

        while True:
            time_counter += 1
            if time_counter > int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis")
                break

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    for pid in list(PROCESS_LIST):
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            PROCESS_LIST.remove(pid)

                    # ToDo
                    # ask the package if it knows any new pids
                    # add_pids(pack.get_pids())

                    # also ask the auxiliaries
                    for aux in aux_avail:
                        add_pids(aux.get_pids())

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if not PROCESS_LIST:
                        log.info("Process list is empty, terminating analysis")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    pack.set_pids(PROCESS_LIST)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such function
                    # returns False, it means that it requested the analysis
                    # to be terminate.
                    if not pack.check():
                        log.info(
                            "The analysis package requested the termination of the analysis"
                        )
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning(
                        'The package "%s" check function raised an exception: %s',
                        package_class, e)
            except Exception as e:
                log.exception("The PID watching loop raised an exception: %s",
                              e)
            finally:
                # Zzz.
                time.sleep(1)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            pack.finish()
        except Exception as e:
            log.warning(
                'The package "%s" finish function raised an exception: %s',
                package_class, e)

        try:
            # Upload files the package created to files in the results folder
            package_files = pack.package_files()
            if package_files is not None:
                for package in package_files:
                    upload_to_host(package[0],
                                   os.path.join("files", package[1]))
        except Exception as e:
            log.warning(
                'The package "%s" package_files function raised an exception: %s',
                package_class, e)

        # Terminate the Auxiliary modules.
        for aux in sorted(aux_enabled, key=lambda x: x.priority):
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        if self.config.terminate_processes:
            # Try to terminate remaining active processes. We do this to make sure
            # that we clean up remaining open handles (sockets, files, etc.).
            log.info("Terminating remaining processes before shutdown")

            for pid in PROCESS_LIST:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.terminate()
                    except Exception:
                        continue

        # Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning(
                    "Exception running finish callback of auxiliary module %s: %s",
                    aux.__class__.__name__, e)

        # Let's invoke the completion procedure.
        self.complete()

        return True
Code Example #35
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()

        log.debug("Starting analyzer from: %s", os.getcwd())
        log.debug("Storing results at: %s", PATHS["root"])
        log.debug("Pipe server name: %s", PIPE)

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect "
                      "it automagically.")

            # If the analysis target is a file, we choose the package according
            # to the file format.
            if self.config.category == "file":
                package = choose_package(self.config.file_type,
                                         self.config.file_name)
            # If it's an URL, we'll just use the default Internet Explorer
            # package.
            else:
                package = "ie"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file "
                                  "type: {0}".format(self.config.file_type))

            log.info("Automatically selected analysis package \"%s\"", package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError("Unable to import package \"{0}\", does "
                              "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class "
                              "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        pack = package_class(self.config.get_options())

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(
                auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning(
                    "Unable to import the auxiliary module "
                    "\"%s\": %s", name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in Auxiliary.__subclasses__():
            # Try to start the auxiliary module.
            try:
                aux = module()
                aux_avail.append(aux)
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented",
                            aux.__class__.__name__)
                continue
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s",
                            aux.__class__.__name__, e)
                continue
            finally:
                log.debug("Started auxiliary module %s",
                          aux.__class__.__name__)
                aux_enabled.append(aux)

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        try:
            pids = pack.start(self.target)
        except NotImplementedError:
            raise CuckooError("The package \"{0}\" doesn't contain a run "
                              "function.".format(package_name))
        except CuckooPackageError as e:
            raise CuckooError("The package \"{0}\" start function raised an "
                              "error: {1}".format(package_name, e))
        except Exception as e:
            raise CuckooError("The package \"{0}\" start function encountered "
                              "an unhandled exception: "
                              "{1}".format(package_name, e))

        # If the analysis package returned a list of process IDs, we add them
        # to the list of monitored processes and enable the process monitor.
        if pids:
            add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running "
                     "for the full timeout.")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout.")
            pid_check = False

        time_counter = 0

        while True:
            time_counter += 1
            if time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis.")
                break

            # If the process lock is locked, it means that something is
            # operating on the list of monitored processes. Therefore we
            # cannot proceed with the checks until the lock is released.
            if PROCESS_LOCK.locked():
                KERNEL32.Sleep(1000)
                continue

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    for pid in PROCESS_LIST:
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            PROCESS_LIST.remove(pid)

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if not PROCESS_LIST:
                        log.info("Process list is empty, "
                                 "terminating analysis.")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    pack.set_pids(PROCESS_LIST)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such function
                    # returns False, it means that it requested the analysis
                    # to be terminate.
                    if not pack.check():
                        log.info("The analysis package requested the "
                                 "termination of the analysis.")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning(
                        "The package \"%s\" check function raised "
                        "an exception: %s", package_name, e)
            finally:
                # Zzz.
                KERNEL32.Sleep(1000)

        # Create the shutdown mutex.
        KERNEL32.CreateMutexA(None, False, SHUTDOWN_MUTEX)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            pack.finish()
        except Exception as e:
            log.warning(
                "The package \"%s\" finish function raised an "
                "exception: %s", package_name, e)

        try:
            # Upload files the package created to package_files in the results folder
            package_files = pack.package_files()
            if package_files is not None:
                for package in package_files:
                    upload_to_host(package[0],
                                   os.path.join("package_files", package[1]))
        except Exception as e:
            log.warning(
                "The package \"%s\" package_files function raised an "
                "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in aux_enabled:
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        if self.config.terminate_processes:
            # Try to terminate remaining active processes. We do this to make sure
            # that we clean up remaining open handles (sockets, files, etc.).
            log.info("Terminating remaining processes before shutdown.")

            for pid in PROCESS_LIST:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.terminate()
                    except:
                        continue

        # Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning(
                    "Exception running finish callback of auxiliary "
                    "module %s: %s", aux.__class__.__name__, e)

        # Let's invoke the completion procedure.
        self.complete()

        return True
Code Example #36
File: analyzer.py  Project: NextSecurity/cuckoo
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()

        log.info("Starting analyzer from: %s", os.getcwd())
        log.info("Storing results at: %s", PATHS["root"])
        log.info("Pipe server name: %s", PIPE)

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.info("No analysis package specified, trying to detect " "it automagically")
            # If the analysis target is a file, we choose the package according
            # to the file format.
            if self.config.category == "file":
                package = choose_package(self.config.file_type, self.config.file_name)
            # If it's an URL, we'll just use the default Internet Explorer
            # package.
            else:
                package = "ie"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file " "type: {0}".format(self.config.file_type))

            log.info('Automatically selected analysis package "%s"', package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError('Unable to import package "{0}", does ' "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract's subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class " "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        pack = package_class(self.get_options())

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning("Unable to import the auxiliary module " '"%s": %s', name, e)

        # Walk through the available auxiliary modules.
        aux_enabled = []
        for module in Auxiliary.__subclasses__():
            # Try to start the auxiliary module.
            try:
                aux = module()
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented", module.__name__)
                continue
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s", module.__name__, e)
                continue
            # Register the module only once start() has actually succeeded.
            else:
                log.info("Started auxiliary module %s", aux.__class__.__name__)
                aux_enabled.append(aux)

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        try:
            pids = pack.start(self.target)
        except NotImplementedError:
            raise CuckooError('The package "{0}" doesn\'t contain a run ' "function.".format(package_name))
        except CuckooPackageError as e:
            raise CuckooError('The package "{0}" start function raised an ' "error: {1}".format(package_name, e))
        except Exception as e:
            raise CuckooError(
                'The package "{0}" start function encountered ' "an unhandled exception: " "{1}".format(package_name, e)
            )

        # If the analysis package returned a list of process IDs, we add them
        # to the list of monitored processes and enable the process monitor.
        if pids:
            add_pids(pids)
            pid_check = True
        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running " "for the full timeout")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout")
            pid_check = False

        time_counter = 0

        while True:
            time_counter += 1
            if time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis")
                break

            # If the process lock is locked, it means that something is
            # operating on the list of monitored processes. Therefore we cannot
            # proceed with the checks until the lock is released.
            if PROCESS_LOCK.locked():
                KERNEL32.Sleep(1000)
                continue

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    # Iterate over a copy, since terminated PIDs are removed below.
                    for pid in PROCESS_LIST[:]:
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            PROCESS_LIST.remove(pid)

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if len(PROCESS_LIST) == 0:
                        log.info("Process list is empty, " "terminating analysis...")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    pack.set_pids(PROCESS_LIST)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such a function
                    # returns False, it means that it requested the analysis
                    # to be terminated.
                    if not pack.check():
                        log.info("The analysis package requested the " "termination of the analysis...")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning('The package "%s" check function raised ' "an exception: %s", package_name, e)
            finally:
                # Zzz.
                KERNEL32.Sleep(1000)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            pack.finish()
        except Exception as e:
            log.warning('The package "%s" finish function raised an ' "exception: %s", package_name, e)

        try:
            # Upload files the package created to package_files in the results folder
            package_files = pack.package_files()
            if package_files is not None:
                for package in package_files:
                    upload_to_host(package[0], os.path.join("package_files", package[1]))
        except Exception as e:
            log.warning('The package "%s" package_files function raised an ' "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in aux_enabled:
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s", aux.__class__.__name__, e)

        # Try to terminate remaining active processes. We do this to make sure
        # that we clean up remaining open handles (sockets, files, etc.).
        log.info("Terminating remaining processes before shutdown...")

        for pid in PROCESS_LIST:
            proc = Process(pid=pid)
            if proc.is_alive():
                try:
                    proc.terminate()
                except:
                    continue

        # Let's invoke the completion procedure.
        self.complete()

        return True
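This variant tracks monitored processes through module-level globals (PROCESS_LIST, PROCESS_LOCK, add_pids), whereas the later examples route everything through a self.process_list object exposing add_pids(), remove_pid() and a pids attribute. A minimal sketch of what such a helper could look like, assuming only the interface visible in these snippets (the real Cuckoo ProcessList implementation is not shown here and may differ):

import threading

class ProcessList(object):
    # Minimal, illustrative process-list helper; not the upstream implementation.

    def __init__(self):
        self.pids = []
        self.lock = threading.Lock()

    def add_pid(self, pid):
        # Normalize to int and deduplicate.
        with self.lock:
            pid = int(pid)
            if pid not in self.pids:
                self.pids.append(pid)

    def add_pids(self, pids):
        # Accept either a single PID or an iterable of PIDs.
        if not isinstance(pids, (list, tuple, set)):
            pids = [pids]
        for pid in pids:
            self.add_pid(pid)

    def remove_pid(self, pid):
        with self.lock:
            if pid in self.pids:
                self.pids.remove(pid)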
Code example #37
File: analyzer.py  Project: onlyff/MalwareLogAnalysis
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()
        self.path = os.getcwd()

        log.debug("Starting analyzer from: %s", self.path)
        log.debug("Pipe server name: %s", self.config.pipe)
        log.debug("Log pipe server name: %s", self.config.logpipe)

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect "
                      "it automagically.")

            # If the analysis target is a file, we choose the package according
            # to the file format.
            if self.config.category == "file":
                package = choose_package(self.config.file_type,
                                         self.config.file_name,
                                         self.config.pe_exports.split(","))
            # If it's an URL, we'll just use the default Internet Explorer
            # package.
            else:
                package = "ie"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file "
                                  "type: {0}".format(self.config.file_type))

            log.info("Automatically selected analysis package \"%s\"", package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError("Unable to import package \"{0}\", does "
                              "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class "
                              "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        self.package = package_class(self.config.options, analyzer=self)

        # Move the sample to the current working directory as provided by the
        # task - one is able to override the starting path of the sample.
        # E.g., for some samples it might be useful to run from %APPDATA%
        # instead of %TEMP%.
        if self.config.category == "file":
            self.target = self.package.move_curdir(self.target)

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(
                auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning(
                    "Unable to import the auxiliary module "
                    "\"%s\": %s", name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in Auxiliary.__subclasses__():
            # Try to start the auxiliary module.
            try:
                aux = module(options=self.config.options, analyzer=self)
                aux_avail.append(aux)
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented",
                            module.__name__)
            except CuckooDisableModule:
                continue
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s",
                            module.__name__, e)
            else:
                log.debug("Started auxiliary module %s", module.__name__)
                aux_enabled.append(aux)

###########################################################
#new code 2017_05_19
        shutil.copyfile(self.target, 'C:\\dbg\\sample.exe')
        with open('log.txt', 'w') as creation:
            creation.write('log start')

        with open(self.target, 'rb') as sample:
            s = sample.read(2)
            if s != 'MZ':
                is32bit = False
            else:
                sample.seek(60)
                s = sample.read(4)
                header_offset = struct.unpack("<L", s)[0]
                sample.seek(header_offset + 4)
                s = sample.read(2)
                machine = struct.unpack('<H', s)[0]

                is32bit = (machine == 332)

        if is32bit:
            self.target = 'C:\\dbg\\Helper32.exe'
        else:
            self.target = 'C:\\dbg\\Helper64.exe'

        pids = None
        try:
            proc = Popen(self.target)
            pids = proc.pid
        except Exception as e:
            # Without the "pids = None" fallback above, a failed Popen() would
            # leave "pids" undefined and crash the "if pids:" check below.
            log.error('custom : fail to open process %s : %s', self.target, e)

###########################################################
#origin
# Start analysis package. If for any reason, the execution of the
# analysis package fails, we have to abort the analysis.
# try:
#     pids = self.package.start(self.target)
# except NotImplementedError:
#     raise CuckooError(
#         "The package \"%s\" doesn't contain a run function." %
#         package_name
#     )
# except CuckooPackageError as e:
#     raise CuckooError(
#         "The package \"%s\" start function raised an error: %s" %
#         (package_name, e)
#     )
# except Exception as e:
#     raise CuckooError(
#         "The package \"%s\" start function encountered an unhandled "
#         "exception: %s" % (package_name, e)
#     )
###########################################################

# If the analysis package returned a list of process identifiers, we
# add them to the list of monitored processes and enable the process monitor.
        if pids:
            self.process_list.add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running "
                     "for the full timeout.")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout.")
            pid_check = False

        while self.do_run:
            self.time_counter += 1
            if self.time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis.")
                break

            # If the process lock is locked, it means that something is
            # operating on the list of monitored processes. Therefore we
            # cannot proceed with the checks until the lock is released.
            if self.process_lock.locked():
                KERNEL32.Sleep(1000)
                continue

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    for pid in self.process_list.pids:
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            self.process_list.remove_pid(pid)

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if not self.process_list.pids:
                        log.info("Process list is empty, "
                                 "terminating analysis.")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    self.package.set_pids(self.process_list.pids)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such a function
                    # returns False, it means that it requested the analysis
                    # to be terminated.
                    if not self.package.check():
                        log.info("The analysis package requested the "
                                 "termination of the analysis.")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning(
                        "The package \"%s\" check function raised "
                        "an exception: %s", package_name, e)
            finally:
                # Zzz.
                KERNEL32.Sleep(1000)

        if not self.do_run:
            log.debug("The analyzer has been stopped on request by an "
                      "auxiliary module.")

        # Create the shutdown mutex.
        KERNEL32.CreateMutexA(None, False, SHUTDOWN_MUTEX)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            self.package.finish()
        except Exception as e:
            log.warning(
                "The package \"%s\" finish function raised an "
                "exception: %s", package_name, e)

        try:
            # Upload files the package created to package_files in the
            # results folder.
            for path, name in self.package.package_files() or []:
                upload_to_host(path, os.path.join("package_files", name))
        except Exception as e:
            log.warning(
                "The package \"%s\" package_files function raised an "
                "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in aux_enabled:
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        if self.config.terminate_processes:
            # Try to terminate remaining active processes.
            log.info("Terminating remaining processes before shutdown.")

            for pid in self.process_list.pids:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.terminate()
                    except:
                        continue

###############################################################################
#new code
        time.sleep(3)
        with open('C:\\dbg\\log.txt') as f_log:
            raw = f_log.read()
            data = ''.join(raw.split('\x00'))
            log.debug('logged : \n%s', data)


###############################################################################

# Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning(
                    "Exception running finish callback of auxiliary "
                    "module %s: %s", aux.__class__.__name__, e)

        # Let's invoke the completion procedure.
        self.complete()

        return True
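The "#new code" block in the example above picks a 32-bit or 64-bit helper by walking the sample's PE header: it checks the MZ magic, reads the e_lfanew offset stored at 0x3C, and compares the COFF machine field found four bytes past the PE signature (332 == 0x14c == IMAGE_FILE_MACHINE_I386). A standalone sketch of that check, with the function name and error handling chosen here purely for illustration:

import struct

IMAGE_FILE_MACHINE_I386 = 0x014c    # 332 decimal, the value tested above
IMAGE_FILE_MACHINE_AMD64 = 0x8664

def pe_is_32bit(path):
    # Returns True for a 32-bit PE, False for a 64-bit (x64) PE.
    with open(path, "rb") as f:
        if f.read(2) != b"MZ":
            raise ValueError("not an MZ executable")
        f.seek(0x3C)
        pe_offset = struct.unpack("<L", f.read(4))[0]
        # Skip the 4-byte "PE\0\0" signature to reach the COFF machine field.
        f.seek(pe_offset + 4)
        machine = struct.unpack("<H", f.read(2))[0]
    if machine == IMAGE_FILE_MACHINE_I386:
        return True
    if machine == IMAGE_FILE_MACHINE_AMD64:
        return False
    raise ValueError("unsupported machine type: 0x%04x" % machine)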
Code example #38
File: analyzer.py  Project: hatching/cuckoo-ekhunting
    def stop(self):
        """Stop the analyzer and all running modules."""
        log.info("Stopping analysis")
        for pkg_id, package in self.packages.iteritems():

            if package.options.get("procmemdump"):
                try:
                    package.dump_procmem()
                except Exception as e:
                    log.exception(
                        "Error during the creation of a memory dump for"
                        " package '%s'. %s", package.__class__.__name__, e
                    )

            try:
                # Perform final operations for all analysis packages
                package.finish()
            except Exception as e:
                log.exception(
                    "The analysis package '%s' raised an exception in its "
                    "finish method. %s", package.__class__.__name__, e
                )

            try:
                # Upload files the package created to package_files in the
                # results folder.
                for path, name in package.package_files() or []:
                    upload_to_host(path, os.path.join("package_files", name))
            except Exception as e:
                log.exception(
                    "The package '%s' package_files function raised an "
                    "exception: %s", package.__class__.__name__, e
                )

        for aux_name, aux in self.aux_enabled.iteritems():
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.exception(
                    "Failed to terminate auxiliary module: %s. %s",
                    aux_name, e
                )

        if self.config.terminate_processes:
            self.plist.terminate_tracked()

        for aux_name, aux in self.aux_enabled.iteritems():
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.exception(
                    "Failed to terminate auxiliary module: %s. %s", aux_name, e
                )

        # Stop the pipe for commands to be sent to the analyzer.
        self.command_pipe.stop()

        # Upload all pending files before ending the analysis
        self.files.dump_files()
        return True
Code example #39
    def stop(self):
        upload_to_host(self.filepath, os.path.join("logs", "dbgview.log"))
Code example #40
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()
        self.path = os.getcwd()

        log.debug("Starting analyzer from: %s", self.path)
        log.debug("Pipe server name: %s", self.config.pipe)
        log.debug("Log pipe server name: %s", self.config.logpipe)

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect "
                      "it automagically.")

            # If the analysis target is a file, we choose the package according
            # to the file format.
            if self.config.category == "file":
                package = choose_package(self.config.file_type,
                                         self.config.file_name,
                                         self.config.pe_exports.split(","))
            # If it's an URL, we'll just use the default Internet Explorer
            # package.
            else:
                package = "ie"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file "
                                  "type: {0}".format(self.config.file_type))

            log.info("Automatically selected analysis package \"%s\"", package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError("Unable to import package \"{0}\", does "
                              "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class "
                              "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        self.package = package_class(self.config.options, analyzer=self)

        # Move the sample to the current working directory as provided by the
        # task - one is able to override the starting path of the sample.
        # E.g., for some samples it might be useful to run from %APPDATA%
        # instead of %TEMP%.
        if self.config.category == "file":
            self.target = self.package.move_curdir(self.target)

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(
                auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning(
                    "Unable to import the auxiliary module "
                    "\"%s\": %s", name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in Auxiliary.__subclasses__():
            # Try to start the auxiliary module.
            try:
                aux = module(options=self.config.options, analyzer=self)
                aux_avail.append(aux)
                aux.init()
                aux.start()
            except (NotImplementedError, AttributeError):
                log.exception("Auxiliary module %s was not implemented",
                              module.__name__)
            except CuckooDisableModule:
                continue
            except Exception as e:
                log.exception("Cannot execute auxiliary module %s: %s",
                              module.__name__, e)
            else:
                log.debug("Started auxiliary module %s", module.__name__)
                aux_enabled.append(aux)

        # Inform zer0m0n of the ResultServer address.
        zer0m0n.resultserver(self.config.ip, self.config.port)

        # Forward the command pipe and logpipe names on to zer0m0n.
        zer0m0n.cmdpipe(self.config.pipe)
        zer0m0n.channel(self.config.logpipe)

        # Hide the Cuckoo Analyzer & Cuckoo Agent.
        zer0m0n.hidepid(self.pid)
        zer0m0n.hidepid(self.ppid)

        # Initialize zer0m0n with our compiled Yara rules.
        zer0m0n.yarald("bin/rules.yarac")

        # Propagate the requested dump interval, if set.
        zer0m0n.dumpint(int(self.config.options.get("dumpint", "0")))

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        pids = self.package.start(self.target)

        # If the analysis package returned a list of process identifiers, we
        # add them to the list of monitored processes and enable the process monitor.
        if pids:
            self.process_list.add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running "
                     "for the full timeout.")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout.")
            pid_check = False

        while self.do_run:
            self.time_counter += 1
            if self.time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis.")
                break

            # If the process lock is locked, it means that something is
            # operating on the list of monitored processes. Therefore we
            # cannot proceed with the checks until the lock is released.
            if self.process_lock.locked():
                KERNEL32.Sleep(1000)
                continue

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    # We also track the PIDs provided by zer0m0n.
                    self.process_list.add_pids(zer0m0n.getpids())

                    for pid in self.process_list.pids:
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            self.process_list.remove_pid(pid)

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if not self.process_list.pids:
                        log.info("Process list is empty, "
                                 "terminating analysis.")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    self.package.set_pids(self.process_list.pids)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such a function
                    # returns False, it means that it requested the analysis
                    # to be terminated.
                    if not self.package.check():
                        log.info("The analysis package requested the "
                                 "termination of the analysis.")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning(
                        "The package \"%s\" check function raised "
                        "an exception: %s", package_name, e)
            finally:
                # Zzz.
                KERNEL32.Sleep(1000)

        if not self.do_run:
            log.debug("The analyzer has been stopped on request by an "
                      "auxiliary module.")

        # Create the shutdown mutex.
        KERNEL32.CreateMutexA(None, False, SHUTDOWN_MUTEX)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            self.package.finish()
        except Exception as e:
            log.warning(
                "The package \"%s\" finish function raised an "
                "exception: %s", package_name, e)

        try:
            # Upload files the package created to package_files in the
            # results folder.
            for path, name in self.package.package_files() or []:
                upload_to_host(path, os.path.join("package_files", name))
        except Exception as e:
            log.warning(
                "The package \"%s\" package_files function raised an "
                "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in aux_enabled:
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        if self.config.terminate_processes:
            # Try to terminate remaining active processes.
            log.info("Terminating remaining processes before shutdown.")

            for pid in self.process_list.pids:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.terminate()
                    except:
                        continue

        # Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning(
                    "Exception running finish callback of auxiliary "
                    "module %s: %s", aux.__class__.__name__, e)

        # Dump all the notified files.
        self.files.dump_files()

        # Hell yeah.
        log.info("Analysis completed.")
        return True
Code example #41
File: dbgview.py  Project: CERT-Polska/cuckoo
    def stop(self):
        upload_to_host(self.filepath, os.path.join("logs", "dbgview.log"))
Code example #42
File: analyzer.py  Project: suduma/cuckoo
    def run(self):
        """Run analysis.
        @return: operation status.
        """
        self.prepare()

        log.debug("Starting analyzer from: %s", os.getcwd())
        log.debug("Pipe server name: %s", self.config.pipe)
        log.debug("Log pipe server name: %s", self.config.logpipe)

        # If no analysis package was specified at submission, we try to select
        # one automatically.
        if not self.config.package:
            log.debug("No analysis package specified, trying to detect "
                      "it automagically.")

            # If the analysis target is a file, we choose the package according
            # to the file format.
            if self.config.category == "file":
                package = choose_package(self.config.file_type,
                                         self.config.file_name,
                                         self.config.pe_exports.split(","))
            # If it's an URL, we'll just use the default Internet Explorer
            # package.
            else:
                package = "ie"

            # If we weren't able to automatically determine the proper package,
            # we need to abort the analysis.
            if not package:
                raise CuckooError("No valid package available for file "
                                  "type: {0}".format(self.config.file_type))

            log.info("Automatically selected analysis package \"%s\"", package)
        # Otherwise just select the specified package.
        else:
            package = self.config.package

        # Generate the package path.
        package_name = "modules.packages.%s" % package

        # Try to import the analysis package.
        try:
            __import__(package_name, globals(), locals(), ["dummy"], -1)
        # If it fails, we need to abort the analysis.
        except ImportError:
            raise CuckooError("Unable to import package \"{0}\", does "
                              "not exist.".format(package_name))

        # Initialize the package parent abstract.
        Package()

        # Enumerate the abstract subclasses.
        try:
            package_class = Package.__subclasses__()[0]
        except IndexError as e:
            raise CuckooError("Unable to select package class "
                              "(package={0}): {1}".format(package_name, e))

        # Initialize the analysis package.
        self.package = package_class(self.config.options)

        # Move the sample to the current working directory as provided by the
        # task - one is able to override the starting path of the sample.
        # E.g., for some samples it might be useful to run from %APPDATA%
        # instead of %TEMP%.
        if self.config.category == "file":
            self.target = self.package.move_curdir(self.target)

        # Initialize Auxiliary modules
        Auxiliary()
        prefix = auxiliary.__name__ + "."
        for loader, name, ispkg in pkgutil.iter_modules(auxiliary.__path__, prefix):
            if ispkg:
                continue

            # Import the auxiliary module.
            try:
                __import__(name, globals(), locals(), ["dummy"], -1)
            except ImportError as e:
                log.warning("Unable to import the auxiliary module "
                            "\"%s\": %s", name, e)

        # Walk through the available auxiliary modules.
        aux_enabled, aux_avail = [], []
        for module in Auxiliary.__subclasses__():
            # Try to start the auxiliary module.
            try:
                aux = module(options=self.config.options, analyzer=self)
                aux_avail.append(aux)
                aux.start()
            except (NotImplementedError, AttributeError):
                log.warning("Auxiliary module %s was not implemented",
                            module.__name__)
            except Exception as e:
                log.warning("Cannot execute auxiliary module %s: %s",
                            module.__name__, e)
            else:
                log.debug("Started auxiliary module %s",
                          aux.__class__.__name__)
                aux_enabled.append(aux)

        # Start analysis package. If for any reason, the execution of the
        # analysis package fails, we have to abort the analysis.
        try:
            pids = self.package.start(self.target)
        except NotImplementedError:
            raise CuckooError("The package \"{0}\" doesn't contain a run "
                              "function.".format(package_name))
        except CuckooPackageError as e:
            raise CuckooError("The package \"{0}\" start function raised an "
                              "error: {1}".format(package_name, e))
        except Exception as e:
            raise CuckooError("The package \"{0}\" start function encountered "
                              "an unhandled exception: "
                              "{1}".format(package_name, e))

        # If the analysis package returned a list of process identifiers, we
        # add them to the list of monitored processes and enable the process monitor.
        if pids:
            self.process_list.add_pids(pids)
            pid_check = True

        # If the package didn't return any process ID (for example in the case
        # where the package isn't enabling any behavioral analysis), we don't
        # enable the process monitor.
        else:
            log.info("No process IDs returned by the package, running "
                     "for the full timeout.")
            pid_check = False

        # Check in the options if the user toggled the timeout enforce. If so,
        # we need to override pid_check and disable process monitor.
        if self.config.enforce_timeout:
            log.info("Enabled timeout enforce, running for the full timeout.")
            pid_check = False

        while self.do_run:
            self.time_counter += 1
            if self.time_counter == int(self.config.timeout):
                log.info("Analysis timeout hit, terminating analysis.")
                break

            # If the process lock is locked, it means that something is
            # operating on the list of monitored processes. Therefore we
            # cannot proceed with the checks until the lock is released.
            if self.process_lock.locked():
                KERNEL32.Sleep(1000)
                continue

            try:
                # If the process monitor is enabled we start checking whether
                # the monitored processes are still alive.
                if pid_check:
                    for pid in self.process_list.pids:
                        if not Process(pid=pid).is_alive():
                            log.info("Process with pid %s has terminated", pid)
                            self.process_list.remove_pid(pid)

                    # If none of the monitored processes are still alive, we
                    # can terminate the analysis.
                    if not self.process_list.pids:
                        log.info("Process list is empty, "
                                 "terminating analysis.")
                        break

                    # Update the list of monitored processes available to the
                    # analysis package. It could be used for internal
                    # operations within the module.
                    self.package.set_pids(self.process_list.pids)

                try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop's iteration. If such a function
                    # returns False, it means that it requested the analysis
                    # to be terminated.
                    if not self.package.check():
                        log.info("The analysis package requested the "
                                 "termination of the analysis.")
                        break

                # If the check() function of the package raised some exception
                # we don't care, we can still proceed with the analysis but we
                # throw a warning.
                except Exception as e:
                    log.warning("The package \"%s\" check function raised "
                                "an exception: %s", package_name, e)
            finally:
                # Zzz.
                KERNEL32.Sleep(1000)

        if not self.do_run:
            log.debug("The analyzer has been stopped on request by an "
                      "auxiliary module.")

        # Create the shutdown mutex.
        KERNEL32.CreateMutexA(None, False, SHUTDOWN_MUTEX)

        try:
            # Before shutting down the analysis, the package can perform some
            # final operations through the finish() function.
            self.package.finish()
        except Exception as e:
            log.warning("The package \"%s\" finish function raised an "
                        "exception: %s", package_name, e)

        try:
            # Upload files the package created to package_files in the
            # results folder.
            for path, name in self.package.package_files() or []:
                upload_to_host(path, os.path.join("package_files", name))
        except Exception as e:
            log.warning("The package \"%s\" package_files function raised an "
                        "exception: %s", package_name, e)

        # Terminate the Auxiliary modules.
        for aux in aux_enabled:
            try:
                aux.stop()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Cannot terminate auxiliary module %s: %s",
                            aux.__class__.__name__, e)

        if self.config.terminate_processes:
            # Try to terminate remaining active processes. We do this to make sure
            # that we clean up remaining open handles (sockets, files, etc.).
            log.info("Terminating remaining processes before shutdown.")

            for pid in self.process_list.pids:
                proc = Process(pid=pid)
                if proc.is_alive():
                    try:
                        proc.terminate()
                    except:
                        continue

        # Run the finish callback of every available Auxiliary module.
        for aux in aux_avail:
            try:
                aux.finish()
            except (NotImplementedError, AttributeError):
                continue
            except Exception as e:
                log.warning("Exception running finish callback of auxiliary "
                            "module %s: %s", aux.__class__.__name__, e)

        # Let's invoke the completion procedure.
        self.complete()

        return True