    def startUp(self, context):
        self.context = context
        self.logger = Logger.getLogger(SampleJythonFileIngestModuleFactory.moduleName)
        self.filesFound = 0

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass
    def process(self, dataSource, progressBar):
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
			
        logger = Logger.getLogger(SampleJythonDataSourceIngestModuleFactory.moduleName)	

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        autopsyCase = Case.getCurrentCase()
        sleuthkitCase = autopsyCase.getSleuthkitCase()
        services = Services(sleuthkitCase)
        fileManager = services.getFileManager()

        # For our example, we will use FileManager to get all 
        # files with the word "test"
        # in the name and then count and read them
        files = fileManager.findFiles(dataSource, "%test%")

        numFiles = len(files)
        logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process", "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process", "Processing file: " + file.getName())
            fileCount += 1

            # Make an artifact on the blackboard.  TSK_INTERESTING_FILE_HIT is a generic type of
            # artifact.  Refer to the developer docs for other examples.
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), SampleJythonDataSourceIngestModuleFactory.moduleName, "Test file")
            art.addAttribute(att)
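            # Hedged note (not part of the original sample): newer Autopsy releases
            # expect artifacts to be indexed so they appear in keyword search, e.g.
            # via Case.getCurrentCase().getServices().getBlackboard() and its
            # indexArtifact(art) call, as the commented-out code in the iSmartAlarm
            # example further below illustrates.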

            
            # To further the example, this code will read the contents of the file and count the number of bytes
            inputStream = ReadContentInputStream(file)
            buffer = jarray.zeros(1024, "b")
            totLen = 0
            readLen = inputStream.read(buffer)
            while (readLen != -1):
                totLen = totLen + readLen
                readLen = inputStream.read(buffer)
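            # At this point totLen holds the number of bytes read from the file.
            # A minimal follow-up (an addition, not in the original snippet) would
            # be to log the total and release the stream, e.g.:
            # logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__,
            #             "process", "read " + str(totLen) + " bytes from " + file.getName())
            # inputStream.close()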


            # Update the progress bar
            progressBar.progress(fileCount)


        #Post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
            "Sample Jython Data Source Ingest Module", "Found %d files" % fileCount)
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting plugin")
        self.fbPeopleDict = {}
        self.chatMessages = []
        self.fbOwnerId = 0
    def startUp(self, context):
        self.logger = Logger.getLogger(SampleFileIngestModuleWithUIFactory.moduleName)

        # As an example, determine if user configured a flag in UI
        if self.local_settings.getFlag():
            self.logger.logp(Level.INFO, SampleFileIngestModuleWithUI.__name__, "startUp", "flag is set")
        else:
            self.logger.logp(Level.INFO, SampleFileIngestModuleWithUI.__name__, "startUp", "flag is not set")
        
        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass
    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting plugin")
        self.stringColumns = (('TSK_OPERATION_STATE','Operation State'), ('TSK_FILE_SIZE', 'Size (Bytes)'), \
                              ('TSK_ATTRIBUTES','Attributes'), ('TSK_ISFOLDER','Is Folder'), \
                              ('TSK_SOURCE_CRC','Source CRC'), ('TSK_TARGET_CRC','Target CRC'), \
                              ('TSK_MESSAGE','Message'), ('TSK_OPERATION_TYPE','Operation Type'), \
                              ('TSK_HISTORY_FILE','History File'), ('TSK_SOURCE_LOCATION', 'File Source Location'), \
                              ('TSK_TARGET_LOCATION', 'Target Location'), ('TSK_FILE_PATH', 'File Path'))

        self.dateColumns = []
                              
        self.dateColumn = ('TSK_DATETIME_START', 'TSK_DATETIME_ACCESSED', 'TSK_DATETIME_CREATED', 'TSK_DATETIME_MODIFIED', \
                           'TSK_ACTCACHE_CRT_CLOUD', 'TSK_ACTCACHE_LAST_MOD_CLIENT', 'TSK_ACTCACHE_ORIG_LMOC')
    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting plugin")
        self.stringColumns = (('TSK_ACTCACHE_ID','ActivityCache Id'), ('TSK_ACTCACHE_APP_ID', 'Activity Cache App Id'), \
                              ('TSK_ACTCACHE_PAYLOAD','Activity Cache Payload'), ('TSK_ACTCACHE_ACT_TYPE','Activity Type'), \
                              ('TSK_ACTCACHE_LOCAL_ONLY','Is Local Only'), ('TSK_ACTCACHE_ETAG','ETag'), \
                              ('TSK_ACTCACHE_PKGID_HASH','Package Id Hash'), ('TSK_ACTCACHE_PLAT_DEVID','Platform Device Id'), \
                              ('TSK_ACTCACHE_STATUS','Activity Cache Status'))

        self.dateColumns = (('TSK_ACTCACHE_ST_TIME','Start Time'), ('TSK_ACTCACHE_ENDTIME','End Time'), \
                            ('TSK_ACTCACHE_LAST_MOD','Last Modified Time'), ('TSK_ACTCACHE_EXP_TIME','Expiration Time'), \
                            ('TSK_ACTCACHE_CRT_CLOUD','Created In Cloud'), ('TSK_ACTCACHE_LAST_MOD_CLIENT','Last Modified On Client'), \
                            ('TSK_ACTCACHE_ORIG_LMOC','Original Last Modified On Client'))
        self.dateColumn = ('TSK_ACTCACHE_ST_TIME', 'TSK_ACTCACHE_ENDTIME', 'TSK_ACTCACHE_LAST_MOD', 'TSK_ACTCACHE_EXP_TIME', \
                           'TSK_ACTCACHE_CRT_CLOUD', 'TSK_ACTCACHE_LAST_MOD_CLIENT', 'TSK_ACTCACHE_ORIG_LMOC')
Example #7
class PlasoIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(PlasoIngestModuleFactory.moduleName)
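    # Note on the helper below: inspect.stack()[1][3] resolves to the name of the
    # method that called log(), so each record is tagged with the calling method.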

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self.run_plaso = True
        self.path_to_storage_file = ""
        self.vss_option = ""
        self.vss_opt = ""

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        #Show parameters that are passed in
        self.log(
            Level.INFO,
            "Plaso directory ==> " + self.local_settings.getPlaso_Directory())
        self.log(
            Level.INFO, "Plaso Storage File ==> " +
            self.local_settings.getPlaso_Storage_File())
        self.exclude_file_sources = self.local_settings.getExclude_File_Sources()
        if self.exclude_file_sources:
            self.log(Level.INFO,
                     "Exclude File Information from import process")
        else:
            self.log(Level.INFO, "Include File Information in import process")

        # Create path to plaso storage file
        if self.local_settings.getRun_Plaso():
            self.log(Level.INFO, "This is a plaso run")
            self.run_plaso = True
            self.vss_opt = self.local_settings.getComboBox()
            if (self.vss_opt == "VSS Only"):
                self.vss_option = "--vss_only"
            elif (self.vss_opt == "No VSS"):
                self.vss_option = "--no_vss"
            else:
                self.vss_option = "--vss_stores"
        else:
            self.run_plaso = False
            self.log(Level.INFO, "This is a plaso import run")
            self.log(Level.INFO, self.local_settings.getPlaso_Storage_File())
            self.path_to_storage_file = self.local_settings.getPlaso_Storage_File()

        # Check to see if the file to execute exists, if it does not then raise an exception and log error
        # data is taken from the UI
        self.path_to_exe_psort = os.path.join(
            self.local_settings.getPlaso_Directory(), "psort.exe")
        if not os.path.exists(self.path_to_exe_psort):
            raise IngestModuleException(
                "Psort File to Run/execute does not exist.")

        self.path_to_exe_log2t = os.path.join(
            self.local_settings.getPlaso_Directory(), "log2timeline.exe")
        if not os.path.exists(self.path_to_exe_log2t):
            raise IngestModuleException(
                "log2timeline File to Run/execute does not exist.")

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        self.log(
            Level.INFO,
            "Starting to process, Just before call to parse_safari_history")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Get the current case, its database and the file manager
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, ".", "/")
        numFiles = len(files)

        # Create a Plaso directory in the module output directory; if it already exists, continue processing
        Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(os.path.join(Temp_Dir, "Plaso"))
        except:
            self.log(Level.INFO,
                     "Plaso Import Directory already exists " + Temp_Dir)

        # Run log2timeline against the selected images
        plaso_image = dataSource.getPaths()
        if self.local_settings.getRun_Plaso():
            for image in plaso_image:
                base_path = os.path.join(Temp_Dir, "Plaso")
                self.log(Level.INFO, "image ==> " + str(image) + " <==")
                file_name = os.path.basename(image)
                self.log(Level.INFO, "file name ==> " + file_name + " <==")
                base_file_name = os.path.splitext(file_name)[0]
                self.log(Level.INFO,
                         "base_file_name ==> " + base_file_name + " <==")
                ext_file_name = os.path.splitext(file_name)[1]
                self.log(Level.INFO,
                         "ext_file_name ==> " + ext_file_name + " <==")
                log_file = os.path.join(base_path, base_file_name + ".log")
                self.log(Level.INFO, "log_file ==> " + log_file + " <==")
                storage_file = os.path.join(base_path,
                                            base_file_name + ".plaso")
                self.path_to_storage_file = storage_file
                self.log(Level.INFO,
                         "storage_file ==> " + str(storage_file) + " <==")
                self.log(Level.INFO, "VSS Option ==> " + self.vss_opt + " <==")
                if (self.vss_opt == "All VSS"):
                    self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_log2t + " --status_view none --partitions all --logfile " + \
                                         log_file + " --vss_stores all " + storage_file + " " + image)
                    pipe = Popen([self.path_to_exe_log2t, "--status_view", "none", "--partitions", "all", "--logfile", log_file, \
                                  "--vss_stores", "all", storage_file, image], stdout=PIPE, stderr=PIPE)
                else:
                    self.log(Level.INFO, "Running progxx ==> " + self.path_to_exe_log2t + " --status_view none --partitions all --logfile " + \
                                         log_file + " " + self.vss_option + " " + storage_file + " " + image)
                    pipe = Popen([self.path_to_exe_log2t, "--status_view", "none", "--partitions", "all", "--logfile", log_file, \
                                  self.vss_option, storage_file, image], stdout=PIPE, stderr=PIPE)
                out_text = pipe.communicate()[0]
                self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Run the psort.exe program against the storage file to convert the storage file from native to SQLite
        base_path = os.path.join(Temp_Dir, "Plaso")
        self.log(Level.INFO, "Base Path ==> " + base_path)
        #if self.local_settings.getImport_Plaso():
        file_name = os.path.basename(self.path_to_storage_file)
        self.log(Level.INFO, "File Name ==> " + file_name)
        base_file_name = os.path.splitext(file_name)[0]
        self.log(Level.INFO, "Base File Name ==> " + base_file_name)
        ext_file_name = os.path.splitext(file_name)[1]
        self.log(Level.INFO, "Ext File Name ==> " + ext_file_name)
        output_file = os.path.join(base_path, base_file_name + ".db3")
        self.log(Level.INFO, "Output File ==> " + output_file)
        storage_file = os.path.join(base_path, base_file_name + ".plaso")
        self.log(Level.INFO, "Storage File ==> " + storage_file)
        ##self.database_file = Temp_Dir + "\\Plaso\\Plaso.db3"
        if self.local_settings.getRun_Plaso():
            self.log(Level.INFO, "Running program ==> " + self.path_to_exe_psort + " -o 4n6time_sqlite -w " + output_file + " " + \
                     storage_file)
            pipe = Popen([
                self.path_to_exe_psort, "-o", "4n6time_sqlite", "-w",
                output_file, storage_file
            ],
                         stdout=PIPE,
                         stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)
        else:
            self.log(Level.INFO, "Running program ==> " + self.path_to_exe_psort + " -o 4n6time_sqlite -w " + output_file + \
                     self.path_to_storage_file)
            pipe = Popen([
                self.path_to_exe_psort, "-o", "4n6time_sqlite", "-w",
                output_file, self.path_to_storage_file
            ],
                         stdout=PIPE,
                         stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

        plaso_db_file = output_file
        plaso_db_dir = os.path.join(Temp_Dir, "Plaso")
        self.log(Level.INFO, "Plaso DB File ==> " + plaso_db_file)
        for file in files:
            abstract_file_info = skCase.getAbstractFileById(file.getId())
            #self.log(Level.INFO, "Abstract File Info ==> " + str(abstract_file_info))

        # Add derived file
        file = skCase.addDerivedFile(output_file, plaso_db_dir, os.path.getsize(plaso_db_file), \
                                     0, 0, 0, 0, True, abstract_file_info, "", "", "", "", TskData.EncodingType.NONE)

        self.log(Level.INFO, "Derived File ==> " + str(file))

        # Create the Attributes for plaso
        try:
            attID_source = skCase.addArtifactAttributeType(
                "TSK_PLASO_SOURCE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso Source")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Plaso Source ==> ")
        try:
            attID_sourcetype = skCase.addArtifactAttributeType(
                "TSK_PLASO_SOURCE_TYPE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso Source Type")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Plaso Source Type ==> ")
        try:
            attID_type = skCase.addArtifactAttributeType(
                "TSK_PLASO_TYPE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso Type")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Plaso Type ==> ")
        try:
            attID_desc = skCase.addArtifactAttributeType(
                "TSK_PLASO_DESCRIPTION",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso Description")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Plaso Description ==> ")
        try:
            attID_filename = skCase.addArtifactAttributeType(
                "TSK_PLASO_FILENAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso File Name")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Plaso File Name ==> ")
        try:
            attID_format = skCase.addArtifactAttributeType(
                "TSK_PLASO_FORMAT",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso Format")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Plaso Format ==> ")
        try:
            attID_extra = skCase.addArtifactAttributeType(
                "TSK_PLASO_EXTRA",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso Extra")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Plaso Extra ==> ")
        try:
            attID_vss_num = skCase.addArtifactAttributeType(
                "TSK_PLASO_VSS_STORE_NUM",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Plaso VSS Store Num")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Plaso VSS Store Num ==> ")

        # Get the artifact and attributes
        attID_source = skCase.getAttributeType("TSK_PLASO_SOURCE")
        artID_sourcetype = skCase.getAttributeType("TSK_PLASO_SOURCE_TYPE")
        attID_type = skCase.getAttributeType("TSK_PLASO_TYPE")
        attID_desc = skCase.getAttributeType("TSK_PLASO_DESCRIPTION")
        self.log(Level.INFO, "Description Attribute ==> " + str(attID_desc))
        attID_filename = skCase.getAttributeType("TSK_PLASO_FILENAME")
        attID_format = skCase.getAttributeType("TSK_PLASO_FORMAT")
        attID_extra = skCase.getAttributeType("TSK_PLASO_EXTRA")
        attID_vss_num = skCase.getAttributeType("TSK_PLASO_VSS_STORE_NUM")

        # Open the DB using JDBC
        lclDbPath = output_file
        self.log(
            Level.INFO,
            "Path the Plaso Import file database file created ==> " +
            lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                output_file + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the l2t_sources table to include or exclude FILES based on user response
        try:
            stmt = dbConn.createStatement()
            l2t_sources_sql = "select sources from l2t_sources"
            if self.exclude_file_sources:
                l2t_sources_sql = l2t_sources_sql + " where sources != 'FILE'"
            self.log(Level.INFO, l2t_sources_sql)
            resultSet = stmt.executeQuery(l2t_sources_sql)
            self.log(Level.INFO, "query l2t_sources table")
        except SQLException as e:
            self.log(
                Level.INFO, "Error querying database for l2t_sources table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts
        while resultSet.next():
            # Create the Plaso source artifact type, catching the error if it already exists
            try:
                self.log(
                    Level.INFO, "Begin Create New Artifacts ==> " +
                    resultSet.getString("sources"))
                artID_art = skCase.addArtifactType( "TSK_PLASO_" + resultSet.getString("sources"), "Plaso Source " + \
                                                   resultSet.getString("sources"))
            except:
                self.log(
                    Level.INFO,
                    "Artifact type creation error; the type may already exist. ==> "
                )

            # Get the artifact and attributes
            artID_art = skCase.getArtifactTypeID(
                "TSK_PLASO_" + resultSet.getString("sources"))
            artID_art_evt = skCase.getArtifactType(
                "TSK_PLASO_" + resultSet.getString("sources"))

            try:
                stmt = dbConn.createStatement()
                log2timeline_sql = "select source, sourcetype, type, description, filename, format, extra, " + \
                                   " strftime('%s',datetime) 'datetime', vss_store_number, url " + \
                                   " from log2timeline where source = '" + resultSet.getString("sources") + "';"
                self.log(Level.INFO, log2timeline_sql)
                resultSet2 = stmt.executeQuery(log2timeline_sql)
                self.log(Level.INFO, "query lastsession table")
            except SQLException as e:
                self.log(
                    Level.INFO,
                    "Error querying database for log2timeline table (" +
                    e.getMessage() + ")")
                return IngestModule.ProcessResult.OK
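            # Hedged sketch (not in the original): the same query could be issued
            # with a JDBC PreparedStatement instead of string concatenation, e.g.:
            # pstmt = dbConn.prepareStatement(
            #     "select source, sourcetype, type, description, filename, format, extra, "
            #     "strftime('%s',datetime) 'datetime', vss_store_number, url "
            #     "from log2timeline where source = ?")
            # pstmt.setString(1, resultSet.getString("sources"))
            # resultSet2 = pstmt.executeQuery()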

            # Cycle through each row and create artifacts
            while resultSet2.next():
                try:
                    art = file.newArtifact(artID_art)
                    #self.log(Level.INFO, "Inserting attribute source ==> 2")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_source,
                            PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("source")))
                    #self.log(Level.INFO, "Inserting attribute sourcetype")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_sourcetype,
                            PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("sourcetype")))
                    #self.log(Level.INFO, "Inserting attribute Type")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_type, PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("type")))
                    #self.log(Level.INFO, "Inserting attribute description")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_desc, PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("description")))
                    #self.log(Level.INFO, "Inserting attribute filename")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_filename,
                            PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("filename")))
                    #self.log(Level.INFO, "Inserting attribute format")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_format, PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("format")))
                    #self.log(Level.INFO, "Inserting attribute extra")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_extra, PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("extra")))
                    #self.log(Level.INFO, "Inserting attribute Date/Time")
                    art.addAttribute(
                        BlackboardAttribute(
                            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.
                            getTypeID(), PlasoIngestModuleFactory.moduleName,
                            resultSet2.getInt("datetime")))
                    #self.log(Level.INFO, "Inserting attribute vss_store_number")
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_vss_num, PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("vss_store_number")))
                    #self.log(Level.INFO, "Inserting attribute URL")
                    art.addAttribute(
                        BlackboardAttribute(
                            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.
                            getTypeID(), PlasoIngestModuleFactory.moduleName,
                            resultSet2.getString("URL")))

                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error getting values from the Log2timeline table (" +
                        e.getMessage() + ")")

            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(PlasoIngestModuleFactory.moduleName,
                                artID_art_evt, None))

        stmt.close()
        dbConn.close()

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Plaso Ingest Module",
            "Plaso data has been analyzed")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting plugin")
Example #9
class GamesaveIngestModule(FileIngestModule):

    _logger = Logger.getLogger(GamesaveIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def startUp(self, context):
        self.filesFound = 0

        skCase = Case.getCurrentCase().getSleuthkitCase()
        # Create the artifact type, if it exists then catch the error
        try:
            self.log(Level.INFO, "Begin Create New Artifact")
            artID_ns_gs = skCase.addArtifactType(
                "TSK_ART_NS_GS", "Nintendo Switch - Game Saves")
        except:
            self.log(Level.INFO, "Artifact Creation Error: NS - Game Saves")

        # Create the attribute type, if it exists then catch the error
        try:
            attID_ns_gid = skCase.addArtifactAttributeType(
                'TSK_ATT_NS_GAME',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Game")
        except:
            self.log(Level.INFO, "Attribute Creation Error: NS Game")

        try:
            attID_ns_ts = skCase.addArtifactAttributeType(
                'TSK_ATT_NS_TS',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Last Saved")
        except:
            self.log(Level.INFO, "Attribute Creation Error: NS TS")

        try:
            attID_ns_info = skCase.addArtifactAttributeType(
                'TSK_ATT_NS_INFO',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Game Information")
        except:
            self.log(Level.INFO, "Attribute Creation Error: NS INFO")

        path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'game_ids.json')

        if not os.path.exists(path):
            raise IngestModuleException(
                "game_ids was not found in module folder")

        with open(path, "r") as data_file:
            self.gids = json.load(data_file)
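        # Based on how self.gids is used in process(), game_ids.json appears to
        # map title-ID hex strings to game names; a hypothetical entry would be:
        # { "0100000000010000": "Some Game Title" }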

        pass

    def process(self, file):

        skCase = Case.getCurrentCase().getSleuthkitCase()

        artID_ns_gs_id = skCase.getArtifactTypeID("TSK_ART_NS_GS")
        artID_ns_gs = skCase.getArtifactType("TSK_ART_NS_GS")

        attID_ns_gid = skCase.getAttributeType("TSK_ATT_NS_GAME")
        attID_ns_ts = skCase.getAttributeType("TSK_ATT_NS_TS")
        attID_ns_info = skCase.getAttributeType("TSK_ATT_NS_INFO")

        # Skip non-files
        if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
                or
            (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
                or (file.isFile() is False)):
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO,
                 "Checking file with parent path: " + file.getParentPath())

        # Searches all save files regardless of which partition they are on; this
        # runs quickly enough that it does not need optimising at this point.
        if file.getParentPath().upper() == "/SAVE/":

            self.log(Level.INFO, "Found a game save: " + file.getName())
            self.filesFound += 1

            buf = zeros(8, 'b')
            file.read(buf, 1752, 8)

            b_gid = binascii.hexlify(buf)

            str_gid = "".join(
                reversed([b_gid[i:i + 2]
                          for i in range(0, len(b_gid), 2)])).upper()
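            # The slice-and-reverse above appears to turn the little-endian bytes
            # read at offset 1752 into a big-endian, upper-case hex string, which
            # is the form the title IDs in game_ids.json are expected to use.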

            if str_gid in self.gids:

                timestamp = file.getMtimeAsDate()
                game = self.gids[str_gid]
                more_info = "https://ec.nintendo.com/apps/%s/GB" % str_gid

                # Don't add to blackboard if the artifact already exists
                artifactList = file.getArtifacts(artID_ns_gs_id)
                for artifact in artifactList:
                    dupe_test = artifact.getAttribute(attID_ns_gid)
                    if dupe_test:
                        return IngestModule.ProcessResult.OK

                art = file.newArtifact(artID_ns_gs_id)

                art.addAttribute(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.
                        getTypeID(), GamesaveIngestModuleFactory.moduleName,
                        "Nintendo Switch - Game Save"))
                art.addAttribute(
                    BlackboardAttribute(attID_ns_gid,
                                        GamesaveIngestModuleFactory.moduleName,
                                        game))
                art.addAttribute(
                    BlackboardAttribute(attID_ns_ts,
                                        GamesaveIngestModuleFactory.moduleName,
                                        timestamp))
                art.addAttribute(
                    BlackboardAttribute(attID_ns_info,
                                        GamesaveIngestModuleFactory.moduleName,
                                        more_info))

                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(GamesaveIngestModuleFactory.moduleName,
                                    artID_ns_gs, None))

            return IngestModule.ProcessResult.OK

    def shutDown(self):
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA,
            GamesaveIngestModuleFactory.moduleName,
            str(self.filesFound) + " game saves found")
        _ = IngestServices.getInstance().postMessage(message)
Example #10
    def __init__(self):
        self.tags_selected = []
        self.moduleName = "SPAI's HTML Model for Autopsy Report"

        self._logger = Logger.getLogger(self.moduleName)
class ISmartAlarmIngestModule(DataSourceIngestModule):
    _logger = Logger.getLogger(ISmartAlarmIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/4.4/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    # TODO: Add any setup code that you need here.
    def startUp(self, context):

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException("Oh No!")

        # Settings
        self.log(Level.INFO, str(self.local_settings))

        # Get Case
        case = Case.getCurrentCase().getSleuthkitCase()

        # Add custom Artifact to blackboard
        # (cf: https://github.com/markmckinnon/Autopsy-Plugins/blob/master/CCM_RecentlyUsedApps/CCM_RecentlyUsedApps.py)
        # iSmartAlarm Specific Artifacts
        self.ismart_artifacts(case)

        # Generic Login Artifacts & Attributes
        esc_generic_artifacts(self, case)

        self.context = context

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/4.4/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/4.4/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    # TODO: Add your analysis code in here.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Use blackboard class to index blackboard artifacts for keyword search
        # blackboard = Case.getCurrentCase().getServices().getBlackboard() #we're not using indexing

        # Get case
        case = Case.getCurrentCase().getSleuthkitCase()

        # Use FileManager to find the iSmartAlarm database and settings files,
        # then count and parse them.
        # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/4.4/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
        fileManager = Case.getCurrentCase().getServices().getFileManager()

        db_files = fileManager.findFiles(
            dataSource,
            "iSmartAlarm.DB") if self.local_settings.get_parse_db() else []
        if self.local_settings.get_parse_settings():
            # Note: 'Alerm' is the app's own typo in the file name
            setting_file = fileManager.findFiles(dataSource,
                                                 "iSmartAlermData.xml")
            mqtt_files = fileManager.findFiles(dataSource,
                                               "MQTT_Message_Service.xml%")
            reg_file = fileManager.findFiles(dataSource, "REG_KEY.xml")
        else:
            setting_file, mqtt_files, reg_file = [], [], []

        num_files = len(db_files) + len(setting_file) + len(mqtt_files) + len(
            reg_file)

        self.log(Level.INFO, "found " + str(num_files) + " files")
        progressBar.switchToDeterminate(num_files)
        file_count = 0

        # Parse DB
        if self.local_settings.get_parse_db():
            try:
                for file in db_files:

                    # Check if the user pressed cancel while we were busy
                    if self.context.isJobCancelled():
                        return IngestModule.ProcessResult.OK

                    self.log(Level.INFO, "Processing file: " + file.getName())
                    file_count += 1

                    # Make an artifact on the blackboard.
                    # Set the DB file as an "interesting file" : TSK_INTERESTING_FILE_HIT is a generic type of
                    # artifact.  Refer to the developer docs for other examples.
                    art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.
                                           TSK_INTERESTING_FILE_HIT)
                    att = BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                        ISmartAlarmIngestModuleFactory.moduleName,
                        "iSmartAlarm")
                    art.addAttribute(att)

                    # Skip the file if it is the SQLite journal file
                    if "journal" in file.getName():
                        continue

                    # try:
                    #     # index the artifact for keyword search
                    #     blackboard.indexArtifact(art)
                    # except Blackboard.BlackboardException as e:
                    #     self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

                    # Save the DB to disk
                    lcl_db_path = os.path.join(
                        Case.getCurrentCase().getTempDirectory(),
                        str(file.getId()) + ".db")
                    ContentUtils.writeToFile(file, File(lcl_db_path))

                    try:
                        Class.forName("org.sqlite.JDBC").newInstance()
                        conn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                           lcl_db_path)
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Could not open database file (not SQLite) " +
                            lcl_db_path + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK

                    self.events = []
                    self.devices = {}

                    try:
                        stmt = conn.createStatement()
                        ipu_dairy_sql = 'SELECT date, action, IPUID, logType, sensorName, operator, profileName FROM TB_IPUDairy'
                        # self.log(Level.INFO, ipu_dairy_sql)
                        result = stmt.executeQuery(ipu_dairy_sql)
                        # self.log(Level.INFO, "query TB_IPUDairy table")
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Error querying database for TB_IPUDairy table (" +
                            e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK

                    while result.next():
                        event = ISmartEventDB()
                        row = [
                            result.getLong('date'),
                            result.getString('action'),
                            result.getString('IPUID'),
                            result.getInt('logType'),
                            result.getString('sensorName'),
                            result.getString('operator'),
                            result.getString('profileName'),
                        ]
                        event.parse_ipu(row, self)
                        self.events.append(event)

                    try:
                        stmt = conn.createStatement()
                        sensors_diary_sql = 'SELECT sensorID, date, action, model, operator, name, logtype FROM TB_SensorDairy;'
                        # self.log(Level.INFO, sensors_diary_sql)
                        result = stmt.executeQuery(sensors_diary_sql)
                        # self.log(Level.INFO, "query TB_SensorDiary table")
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Error querying database for TB_SensorDiary table ("
                            + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK

                    while result.next():
                        event = ISmartEventDB()
                        row = [
                            result.getLong('date'),
                            result.getString('sensorID'),
                            result.getString('action'),
                            result.getString('model'),
                            result.getString('operator'),
                            result.getString('name'),
                            result.getString('logtype'),
                        ]
                        event.parse_sensors(row, self)
                        self.events.append(event)

                    art_type_id = case.getArtifactTypeID("ESC_IOT_ISMARTALARM")
                    art_type = case.getArtifactType("ESC_IOT_ISMARTALARM")

                    for event in self.events:
                        # Artifact
                        art = file.newArtifact(art_type_id)
                        # Attributes
                        att_event_name_id = case.getAttributeType(
                            "ESC_IOT_ISMART_EVENT_NAME")
                        att_event_date_id = case.getAttributeType(
                            "ESC_IOT_ISMART_EVENT_DATE")
                        att_event_type_id = case.getAttributeType(
                            "ESC_IOT_ISMART_EVENT_TYPE")
                        att_event_device_id = case.getAttributeType(
                            "ESC_IOT_ISMART_EVENT_DEVICE")
                        att_event_device_type_id = case.getAttributeType(
                            "ESC_IOT_ISMART_EVENT_DEVICE_TYPE")

                        att_event_name = BlackboardAttribute(
                            att_event_name_id,
                            ISmartAlarmIngestModuleFactory.moduleName,
                            event.name)
                        att_event_date = BlackboardAttribute(
                            att_event_date_id,
                            ISmartAlarmIngestModuleFactory.moduleName,
                            int(mktime(event.timestamp.timetuple())))
                        att_event_type = BlackboardAttribute(
                            att_event_type_id,
                            ISmartAlarmIngestModuleFactory.moduleName,
                            event.event_type)
                        att_event_device = BlackboardAttribute(
                            att_event_device_id,
                            ISmartAlarmIngestModuleFactory.moduleName,
                            event.device.name)
                        att_event_device_type = BlackboardAttribute(
                            att_event_device_type_id,
                            ISmartAlarmIngestModuleFactory.moduleName,
                            event.device.device_type)

                        art.addAttribute(att_event_name)
                        art.addAttribute(att_event_date)
                        art.addAttribute(att_event_type)
                        art.addAttribute(att_event_device)
                        art.addAttribute(att_event_device_type)

                    IngestServices.getInstance().fireModuleDataEvent(
                        ModuleDataEvent(
                            ISmartAlarmIngestModuleFactory.moduleName,
                            art_type, None))
                    # Update the progress bar
                    progressBar.progress(file_count)

                    # Clean Up DB
                    stmt.close()
                    conn.close()
                    os.remove(lcl_db_path)
            except Exception:
                self.log(Level.INFO,
                         "There was an error parsing the ismartalarm DB")

        # Settings & MQTT
        if self.local_settings.get_parse_settings():
            # Settings File
            for file in setting_file:

                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "Processing file: " + file.getName())
                file_count += 1

                # Make an artifact on the blackboard.
                # Set the DB file as an "interesting file" : TSK_INTERESTING_FILE_HIT is a generic type of
                # artifact.  Refer to the developer docs for other examples.
                art = file.newArtifact(
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
                att = BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                    ISmartAlarmIngestModuleFactory.moduleName, "iSmartAlarm")
                art.addAttribute(att)

                # Write the XML out to a temp file (is there a way to avoid this?)
                lcl_setting_path = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getId()) + ".xml")
                ContentUtils.writeToFile(file, File(lcl_setting_path))

                a = minidom.parse(lcl_setting_path)
                tags = a.getElementsByTagName("string")
                ismart_logins = {}
                for tag in tags:
                    if tag.getAttribute('name') == "phoneNum":
                        ismart_logins['username'] = str(tag.firstChild.data)
                    elif tag.getAttribute('name') == "password":
                        ismart_logins['password'] = str(tag.firstChild.data)
                    elif tag.getAttribute('name') == "countryCode":
                        ismart_logins['country_code'] = str(
                            tag.firstChild.data)
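                # The XML parsed here looks like an Android shared-preferences
                # file; a hypothetical fragment matching the parsing above:
                # <map>
                #   <string name="phoneNum">+441234567890</string>
                #   <string name="password">secret</string>
                #   <string name="countryCode">44</string>
                # </map>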

                art_type_id = case.getArtifactTypeID("ESC_GENERIC_LOGIN")
                art_type = case.getArtifactType("ESC_GENERIC_LOGIN")

                # Artifact
                art = file.newArtifact(art_type_id)
                # Attributes
                att_login_username_id = case.getAttributeType(
                    "ESC_GENERIC_LOGIN_USERNAME")
                att_login_secret_id = case.getAttributeType(
                    "ESC_GENERIC_LOGIN_SECRET")
                att_login_secret_type_id = case.getAttributeType(
                    "ESC_GENERIC_LOGIN_SECRET_TYPE")
                att_login_service_id = case.getAttributeType(
                    "ESC_GENERIC_LOGIN_SERVICE")
                att_login_remarks_id = case.getAttributeType(
                    "ESC_GENERIC_LOGIN_REMARKS")

                att_login_username = BlackboardAttribute(
                    att_login_username_id,
                    ISmartAlarmIngestModuleFactory.moduleName,
                    ismart_logins['username'])
                att_login_secret = BlackboardAttribute(
                    att_login_secret_id,
                    ISmartAlarmIngestModuleFactory.moduleName,
                    ismart_logins['password'])
                att_login_secret_type = BlackboardAttribute(
                    att_login_secret_type_id,
                    ISmartAlarmIngestModuleFactory.moduleName, "Password")
                att_login_service = BlackboardAttribute(
                    att_login_service_id,
                    ISmartAlarmIngestModuleFactory.moduleName, "iSmartAlarm")
                att_login_remarks = BlackboardAttribute(
                    att_login_remarks_id,
                    ISmartAlarmIngestModuleFactory.moduleName,
                    "Country Code: %s" % ismart_logins['country_code'])

                art.addAttribute(att_login_username)
                art.addAttribute(att_login_secret)
                art.addAttribute(att_login_secret_type)
                art.addAttribute(att_login_service)
                art.addAttribute(att_login_remarks)

                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(ISmartAlarmIngestModuleFactory.moduleName,
                                    art_type, None))

                # Clean Up
                os.remove(lcl_setting_path)

                progressBar.progress(file_count)

            # MQTT Files
            for file in mqtt_files:

                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "Processing file: " + file.getName())
                file_count += 1

                # Make an artifact on the blackboard.
                # Set the DB file as an "interesting file" : TSK_INTERESTING_FILE_HIT is a generic type of
                # artifact.  Refer to the developer docs for other examples.
                art = file.newArtifact(
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
                att = BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                    ISmartAlarmIngestModuleFactory.moduleName, "iSmartAlarm")
                art.addAttribute(att)
                progressBar.progress(file_count)

            # REG_KEY File
            for file in reg_file:

                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "Processing file: " + file.getName())
                file_count += 1

                # Make an artifact on the blackboard.
                # Set the DB file as an "interesting file" : TSK_INTERESTING_FILE_HIT is a generic type of
                # artifact.  Refer to the developer docs for other examples.
                art = file.newArtifact(
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
                att = BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                    ISmartAlarmIngestModuleFactory.moduleName, "iSmartAlarm")
                art.addAttribute(att)
                progressBar.progress(file_count)

        # FINISHED!
        # Post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "iSmartAlarm Analysis",
                                              "Found %d files" % file_count)
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK

    def ismart_artifacts(self, case):
        try:
            self.log(Level.INFO,
                     "Begin Create New Artifacts ==> ESC_IOT_ISMARTALARM")
            case.addArtifactType("ESC_IOT_ISMARTALARM", "iSmart Alarm Events")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, artifact ESC_IOT_ISMARTALARM exists."
            )

        # Add Custom attributes to blackboard
        # iSmartAlarm Specific Attributes
        try:
            case.addArtifactAttributeType(
                "ESC_IOT_ISMART_EVENT_NAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Event")
        except:
            self.log(
                Level.INFO,
                "Attributes Creation Error, attribute ESC_IOT_ISMART_EVENT_NAME exists."
            )
Example #12
class IphoneIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(IphoneIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    def startUp(self, context):
        self.context = context

        pass

    def process(self, dataSource, progressBar):

        PostBoard = IngestServices.getInstance()
        progressBar.switchToIndeterminate()
        ccase = Case.getCurrentCase().getSleuthkitCase()
        blackboard = Case.getCurrentCase().getServices().getBlackboard()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "Info.plist")
        numFiles = len(files)
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Connected iPhone Analyzer",
            "About to analyze " + str(numFiles) + " files")
        PostBoard.postMessage(message)
        progressBar.switchToDeterminate(numFiles)

        try:
            artifact_name = "TSK_IPHONE"
            artifact_desc = "Connected iPhone Analyzer"

            artID_iphone = ccase.addArtifactType(artifact_name, artifact_desc)
            artID_iphone_evt = ccase.getArtifactType(artifact_name)
            attribute_name = "TSK_IPHONE_DEVICENAME"
            attribute_name2 = "TSK_IPHONE_PRODUCTTYPE"
            attribute_name3 = "TSK_IPHONE_BACKUPDATE"
            attribute_name4 = "TSK_IPHONE_PHONENUMBER"
            attribute_name5 = "TSK_IPHONE_SERIALNUMBER"
            attribute_name6 = "TSK_IPHONE_IMEI"
            attribute_name7 = "TSK_IPHONE_ICCID"
            attribute_name8 = "TSK_IPHONE_BUILD"
            attID_ex1 = ccase.addArtifactAttributeType(
                attribute_name,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Name")
            attID_ex2 = ccase.addArtifactAttributeType(
                attribute_name2,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Type")
            attID_ex3 = ccase.addArtifactAttributeType(
                attribute_name3,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "BackupDate")
            attID_ex4 = ccase.addArtifactAttributeType(
                attribute_name4,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Phone Number")
            attID_ex5 = ccase.addArtifactAttributeType(
                attribute_name5,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Serial")
            attID_ex6 = ccase.addArtifactAttributeType(
                attribute_name6,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "IMEI")
            attID_ex7 = ccase.addArtifactAttributeType(
                attribute_name7,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "ICCID")
            attID_ex8 = ccase.addArtifactAttributeType(
                attribute_name8,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "BUILD")
        except:
            # Artifact and attribute types already exist from a previous run
            pass

        fileCount = 0
        for file in files:
            fileCount += 1
            progressBar.progress(fileCount)
            progressBar.progress("Connected iPhone Analyzer")
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            self.log(Level.INFO, "++++++Processing file: " + file.getName())

            lclPlistPath = os.path.join(
                Case.getCurrentCase().getTempDirectory(),
                str(file.getId()) + ".plist")
            ContentUtils.writeToFile(file, File(lclPlistPath))
            try:
                plist_file = open(lclPlistPath, 'r')
                lines = plist_file.readlines()

                BUILDVERSION = ""
                devicename = ""
                GUID = ""
                ICCID = ""
                IMEI = ""
                LASTBACKUP = ""
                PHONENUMBER = ""
                PRODUCTTYPE = ""
                SERIALNUMBER = ""
                if "<!DOCTYPE plist PUBLIC " in lines[
                        1] and "Apple//DTD PLIST 1.0//EN" in lines[
                            1] and "http://www.apple.com/DTDs/PropertyList" in lines[
                                1]:
                    IPHONE_PLIST = 0
                    counter = 0
                    for i in lines:
                        if "key>Build Version</key>" in lines[counter]:
                            IPHONE_PLIST = 1
                        counter += 1

                    if IPHONE_PLIST == 1:
                        j = 0
                        while j != counter:

                            if "Build Version" in lines[j]:
                                BUILDVERSION = Strip(lines[j + 1])
                                print "BUILD:" + BUILDVERSION
                                j += 1
                            if "<key>Device Name</key>" in lines[j]:
                                devicename = Strip(lines[j + 1])
                                j += 1
                            if "<key>GUID</key>" in lines[j]:
                                GUID = Strip(lines[j + 1])
                                j += 1
                            if "<key>ICCID</key>" in lines[j]:
                                ICCID = Strip(lines[j + 1])
                                j += 1
                            if "<key>IMEI</key>" in lines[j]:
                                IMEI = Strip(lines[j + 1])
                                j += 1
                            if "<key>Last Backup Date</key>" in lines[j]:
                                LASTBACKUP = Strip(lines[j + 1])
                                j += 1
                            if "<key>Phone Number</key>" in lines[j]:
                                PHONENUMBER = Strip(lines[j + 1])
                                j += 1
                            if "<key>Product Type</key>" in lines[j]:
                                PRODUCTTYPE = Strip(lines[j + 1])
                                j += 1
                            if "ey>Serial Number</key>" in lines[j]:
                                SERIALNUMBER = Strip(lines[j + 1])
                                j += 1
                            j += 1

                        artifact_name = "TSK_IPHONE"
                        artifact_desc = "Connected iPhone Analyzer"
                        artID_iphone_evt = ccase.getArtifactType(artifact_name)
                        artID_iphone = ccase.getArtifactTypeID(artifact_name)
                        art = file.newArtifact(artID_iphone)
                        attID_ex1 = ccase.getAttributeType(
                            "TSK_IPHONE_DEVICENAME")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName,
                                devicename))
                        attID_ex1 = ccase.getAttributeType(
                            "TSK_IPHONE_PRODUCTTYPE")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName,
                                PRODUCTTYPE))
                        attID_ex1 = ccase.getAttributeType(
                            "TSK_IPHONE_BACKUPDATE")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName,
                                LASTBACKUP))
                        attID_ex1 = ccase.getAttributeType(
                            "TSK_IPHONE_PHONENUMBER")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName,
                                PHONENUMBER))
                        attID_ex1 = ccase.getAttributeType(
                            "TSK_IPHONE_SERIALNUMBER")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName,
                                SERIALNUMBER))
                        attID_ex1 = ccase.getAttributeType("TSK_IPHONE_IMEI")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName, IMEI))
                        attID_ex1 = ccase.getAttributeType("TSK_IPHONE_ICCID")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName, ICCID))
                        attID_ex1 = ccase.getAttributeType("TSK_IPHONE_BUILD")
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ex1,
                                IphoneIngestModuleFactory.moduleName, BUILDVERSION))
                        PostBoard.fireModuleDataEvent(ModuleDataEvent(IphoneIngestModuleFactory.moduleName, \
                            artID_iphone_evt, None))
                        IPHONE_PLIST = 0
                        plist_file.close()
                    else:
                        plist_file.close()
                else:
                    plist_file.close()
            except:
                plist_file.close()

            os.remove(lclPlistPath)

        # After all Plist files, post a message to the ingest messages in box.
        if numFiles == 0:
            message = IngestMessage.createMessage(
                IngestMessage.MessageType.DATA, "Connected iPhone Analyzer",
                "Nothing to analyze ")
            PostBoard.postMessage(message)
        else:
            message = IngestMessage.createMessage(
                IngestMessage.MessageType.DATA, "Connected iPhone Analyzer",
                "Analyzed %d files" % fileCount)
            PostBoard.postMessage(message)
        return IngestModule.ProcessResult.OK
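
# Illustrative sketch, not part of the scraped module above: the manual line
# scanning of the backup Info.plist could likely be replaced by the
# standard-library plistlib that ships with the bundled Jython 2.7. The keys
# listed here are the ones the module above searches for; the path and error
# handling are left to the caller.
import plistlib

def read_iphone_info_plist(plist_path):
    # readPlist parses an XML property list into a plain dict (Python 2.7 API).
    info = plistlib.readPlist(plist_path)
    wanted = ["Build Version", "Device Name", "GUID", "ICCID", "IMEI",
              "Last Backup Date", "Phone Number", "Product Type",
              "Serial Number"]
    # Return only the keys of interest; missing keys come back as empty strings.
    return dict((key, str(info.get(key, ""))) for key in wanted)
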
class AttomicWalletIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(AttomicWalletIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)
        self._logger = Logger.getLogger(self.__class__.__name__)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting of plugin")
        self.fbPeopleDict = {}
        self.chatMessages = []
        self.fbOwnerId = 0

    def startUp(self, context):
        self.context = context
        pass
        
    # Where the analysis is done.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()
        
        # get current case and the store.vol abstract file information
        skCase = Case.getCurrentCase().getSleuthkitCase();
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        connectionFiles = fileManager.findFiles(dataSource, "Connection.log%", ".atomic")
        numFiles = len(connectionFiles)
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;

        # Create an Atomic Wallet directory in the case temp directory; if it already exists, continue processing.
        temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Atomic_Wallet")
        try:
            os.mkdir(temporaryDirectory)
        except:
            pass

        # get and process connections
        for file in connectionFiles:
            if "-slack" not in file.getName():
                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                fileCount += 1

                # Save the file locally. Use file id as name to reduce collisions
                extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
                ContentUtils.writeToFile(file, File(extractedFile))
                self.processConnectionLogs(extractedFile, file)
                try:
                    os.remove(extractedFile)
                except:
                    self.log(Level.INFO, "Failed to remove file " + extractedFile)

            else:
                extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
                try:
                    os.remove(extractedFile)
                except:
                    self.log(Level.INFO, "Failed to remove file " + extractedFile)


        # Get and process history file            
        historyFiles = fileManager.findFiles(dataSource, "history.json", ".atomic")
        numFiles = len(historyFiles)

        for file in historyFiles:	
            if "-slack" not in file.getName():
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                #self.log(Level.INFO, "Processing file: " + file.getName())
                fileCount += 1

                # Save the file locally. Use file id as name to reduce collisions
                extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
                ContentUtils.writeToFile(file, File(extractedFile))
                self.processHistory(extractedFile, file)
                try:
                    os.remove(extractedFile)
                except:
                    self.log(Level.INFO, "Failed to remove file " + extractedFile)
            else:
                extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
                try:
                    os.remove(extractedFile)
                except:
                    self.log(Level.INFO, "Failed to remove file " + extractedFile)

        try:
            shutil.rmtree(temporaryDirectory)
        except:
            self.log(Level.INFO, "removal of temporary directory failed " + temporaryDirectory)
                
        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
            "Atomic Wallet", " Atomic Wallet Has Been Analyzed " )
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK                
		
    def processConnectionLogs(self, logFile, abstractFile):
    
       moduleName = AttomicWalletIngestModuleFactory.moduleName
    
       connectTimes = []
       disconnectTimes = []
       with open(logFile) as file:
           for logLine in file:
               if "connected" in logLine:
                   logLineList = logLine.split(" ")
                   connectTimes.append(int(self.getDateTime(logLineList[0], logLineList[1])))
               elif "Disconnect" in logLine:
                   logLineList = logLine.split(" ")
                   disconnectTimes.append(int(self.getDateTime(logLineList[0], logLineList[1])))
               else:
                   pass
       try:
           artId = self.createArtifact("TSK_ATOMIC_WALLET_APP_TIMES", "Atomic Wallet Connect/Disconnect Times")
           for connTime in connectTimes:
               artifact = abstractFile.newArtifact(artId)
               attributes = ArrayList()
               attId = self.createAttribute("TSK_ATOMIC_WALLET_CONNECTION_TYPE", "string", "Atomic Wallet Connection Type")
               attributes.add(BlackboardAttribute(attId, moduleName, "Connect"))
               attId = self.createAttribute("TSK_ATOMIC_WALLET_TIME", "datetime", "Atomic Wallet Time")
               attributes.add(BlackboardAttribute(attId, moduleName, connTime))
               try:
                   artifact.addAttributes(attributes)
               except:
                   self.log(Level.INFO, "Error adding attribute to artifact")
               try:
                   self.indexArtifact(artifact)
               except:
                   self.log(Level.INFO, "Error indexing artifact")
           for disTime in disconnectTimes:
               artifact = abstractFile.newArtifact(artId)
               attributes = ArrayList()
               attId = self.createAttribute("TSK_ATOMIC_WALLET_CONNECTION_TYPE", "string", "Atomic Wallet Connection Type")
               attributes.add(BlackboardAttribute(attId, moduleName, "Disconnect"))
               attId = self.createAttribute("TSK_ATOMIC_WALLET_TIME", "datetime", "Atomic Wallet Time")
               attributes.add(BlackboardAttribute(attId, moduleName, disTime))
               try:
                   artifact.addAttributes(attributes)
               except:
                   self.log(Level.INFO, "Error adding attribute to artifact")
               try:
                   self.indexArtifact(artifact)
               except:
                   self.log(Level.INFO, "Error indexing artifact")
       except:
           self.log(Level.INFO, "Error adding attribute")

    def processHistory(self, historyFile, abstractFile):

       moduleName = AttomicWalletIngestModuleFactory.moduleName

       histTrans = []
       with open (historyFile) as file:
           for historyLine in file:
               jsonData = json.loads(historyLine)
               for transaction in jsonData:
                  transactionDict = {}
                  for trans in transaction:
                      if isinstance(transaction[trans],dict):
                          header = transaction[trans]
                          for head in header:
                              transactionDict["transaction_" + trans + "_" + head] = header[head]
                      else:
                          if 'time' in trans:
                             transactionDict["transaction_" + str(trans) + "_UTC"] = transaction[trans]
                          else:
                             transactionDict["transaction_" + str(trans)] = transaction[trans]

                  histTrans.append(transactionDict)
       #self.log(Level.INFO, str(histTrans))
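       # Flattening example (hypothetical entry, for illustration only): a
       # history.json item such as
       #   {"type": "deposit", "amount": {"value": 1, "currency": "BTC"}, "created_time": 1600000000}
       # ends up in histTrans as
       #   {"transaction_type": "deposit",
       #    "transaction_amount_value": 1,
       #    "transaction_amount_currency": "BTC",
       #    "transaction_created_time_UTC": 1600000000}
       # since nested dicts are expanded with the sub-key appended and any
       # top-level key containing 'time' gets a "_UTC" suffix.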
       try:
           artId = self.createArtifact("TSK_ATOMIC_WALLET_TRANS_HIST", "Atomic Wallet Transaction History")
           for history in histTrans:
               hKeys = history.keys()
               attributes = ArrayList()
               artifact = abstractFile.newArtifact(artId)
               for key in hKeys:
                   value = history[key]
                   title = str(key).replace("_"," ").title()
                   attributeName = "TSK_" + str(key).upper()
                   if type(value) == int:
                       if "UTC" in attributeName:
                           attId = self.createAttribute(attributeName, "datetime", title)
                           attributes.add(BlackboardAttribute(attId, moduleName, value))
                       else:
                           attId = self.createAttribute(attributeName, "string", title)
                           attributes.add(BlackboardAttribute(attId, moduleName, str(value)))
                   elif type(value) == dict:
                       pass
                   else:
                       attId = self.createAttribute(attributeName, "string", title)
                       attributes.add(BlackboardAttribute(attId, moduleName, str(value)))
               try:
                   artifact.addAttributes(attributes)
               except:
                   self.log(Level.INFO, "Error adding attribute to artifact")
               try:
                   self.indexArtifact(artifact)
               except:
                   self.log(Level.INFO, "Error indexing artifact")
       except:
           self.log(Level.INFO, "Error adding attribute")
               
    def getDateTime(self, date, time):
    
        dateString = date + " " + time
        timeStamp = strptime(dateString, '%Y-%m-%d %H:%M:%S.%f')
        return mktime(timeStamp)
    
    def createAttribute(self, attributeName, attributeType, attributeDescription):
        
        skCase = Case.getCurrentCase().getSleuthkitCase()
        
        try:
            if "string" == attributeType:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, attributeDescription)
                return skCase.getAttributeType(attributeName)
            elif "datetime" == attributeType:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, attributeDescription)
                return skCase.getAttributeType(attributeName)
            elif "integer" == attributeType:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, attributeDescription)
                return skCase.getAttributeType(attributeName)
            elif "long" == attributeType:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, attributeDescription)
                return skCase.getAttributeType(attributeName)
            elif "double" == attributeType:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE, attributeDescription)
                return skCase.getAttributeType(attributeName)
            elif "byte" == attributeType:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE, attributeDescription)
                return skCase.getAttributeType(attributeName)
            else:
                attributeId = skCase.addArtifactAttributeType(attributeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, attributeDescription)
                return skCase.getAttributeType(attributeName)
        except:		
            self.log(Level.INFO, "Attributes Creation Error  ==> " + str(attributeName) + " <<>> " + str(attributeType) + " <<>> " + str(attributeDescription))
            return skCase.getAttributeType(attributeName)

    def createArtifact(self, artifactName, artifactDescription):
    
        skCase = Case.getCurrentCase().getSleuthkitCase();
                
        try:
             artId = skCase.addArtifactType(artifactName, artifactDescription)
             return skCase.getArtifactTypeID(artifactName)
        except:		
             #self.log(Level.INFO, "Artifacts Creation Error for artifact ==> " + str(artifactName) + " <<>> " + artifactDescription)
             return skCase.getArtifactTypeID(artifactName)

    def indexArtifact(self, artifact):
        blackboard = Case.getCurrentCase().getServices().getBlackboard()

        try:
            blackboard.indexArtifact(artifact)
        except:
            pass
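
# Illustrative sketch, standalone: how the Connection.log timestamps above are
# turned into epoch seconds. The "YYYY-MM-DD HH:MM:SS.mmm ..." line layout is an
# assumption inferred from the '%Y-%m-%d %H:%M:%S.%f' pattern in getDateTime().
from time import mktime, strptime

def connection_log_epoch(log_line):
    # The module splits each line on spaces and uses the first two tokens
    # (date and time) to build the timestamp string.
    parts = log_line.split(" ")
    stamp = strptime(parts[0] + " " + parts[1], '%Y-%m-%d %H:%M:%S.%f')
    # mktime interprets the struct_time as local time and returns float seconds.
    return int(mktime(stamp))
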
class WordlistIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(WordlistIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)
        self._logger = Logger.getLogger(self.__class__.__name__)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.INFO, "Starting of plugin")
        self.host = "localhost"
        self.port = "23232"
        self.qt = "select"
        self.q = "q=text:*"
        self.fl = "fl=text"
        self.wt = "wt=json"

    def startUp(self, context):
        self.context = context
        pass

    # Where the analysis is done.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        coreName = self.findCoreName(Case.getCurrentCase().getName())

        self.log(Level.INFO, "Core name is ==> " + str(coreName))

        if (coreName is not None):
            # get current case and the store.vol abstract file information
            #skCase = Case.getCurrentCase().getSleuthkitCase();
            #fileManager = Case.getCurrentCase().getServices().getFileManager()

            # Get the exported file name will have case number in the name and be in the export directory
            exportFile = os.path.join(
                Case.getCurrentCase().getExportDirectory(),
                Case.getCurrentCase().getName() + "_wordlist.txt")
            exportErrorFile = os.path.join(
                Case.getCurrentCase().getExportDirectory(),
                Case.getCurrentCase().getName() + "_wordlist_Errors.txt")

            url = 'http://' + self.host + ':' + self.port + '/solr/' + coreName + '/' + self.qt + '?'
            start = "start=" + str(0)
            rows = "rows=" + str(10)
            params = [self.q, start, rows, self.wt]
            p = "&".join(params)

            timesToRun = self.numberOfEntries(url, p)
            self.log(Level.INFO, "Times to Run ==> " + str(timesToRun))

            wordHashDict = {}

            with open(exportFile, "w") as wordlist:
                with open(exportErrorFile, 'w') as wordlistError:
                    for i in range(0, timesToRun + 1, 1):

                        if self.context.isJobCancelled():
                            return IngestModule.ProcessResult.OK

                        startPage = (i * 1000)
                        start = "start=" + str(startPage)
                        rows = "rows=" + str(1000)
                        params = [self.q, start, rows, self.wt]
                        p = "&".join(params)

                        self.log(Level.INFO,
                                 "Pageset to process ==> " + str(startPage))

                        connection = urllib.urlopen(url + p)
                        response = json.load(connection)
                        connection.close()

                        docsFound = response['response']['docs']

                        for docFound in docsFound:
                            try:
                                if 'text' in docFound:
                                    docList = docFound['text']
                                    if (len(docList) > 1):
                                        wordListSplit = re.split(
                                            ' |\t', docList[1])
                                        for wordl in wordListSplit:
                                            md5Hash = hashlib.md5(
                                                wordl.encode('utf-8').strip()
                                            ).hexdigest()
                                            if md5Hash in wordHashDict:
                                                continue
                                            else:
                                                wordHashDict[md5Hash] = None
                                                wordlist.write(
                                                    wordl.encode(
                                                        'utf-8').strip() +
                                                    "\n")
                                    # use whole file name
                                    md5Hash = hashlib.md5(
                                        docList[0]).hexdigest()
                                    if md5Hash in wordHashDict:
                                        continue
                                    else:
                                        wordHashDict[md5Hash] = None
                                        wordlist.write(docList[0] + "\n")
                                    # Split the file name and extension and add them in separately
                                    fileParts = docList[0].split(".")
                                    for wordl in fileParts:
                                        md5Hash = hashlib.md5(
                                            wordl.encode(
                                                'utf-8').strip()).hexdigest()
                                        if md5Hash in wordHashDict:
                                            continue
                                        else:
                                            wordHashDict[md5Hash] = None
                                            wordlist.write(
                                                wordl.encode('utf-8').strip() +
                                                "\n")

                            except Exception as e:
                                wordlistError.write("Error ==> " + str(e) +
                                                    " ==> " +
                                                    str(docFound['text']) +
                                                    "\n")

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "Wordlist",
                                              " Wordlidt has been created ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK

    def findCoreName(self, caseName):

        connection = urllib.urlopen(
            "http://localhost:23232/solr/admin/cores?action=status")
        root = ET.fromstring(connection.read())
        for child in root.iter('*'):
            if caseName in str(child.attrib.values()):
                return str(child.attrib.values()[0])
        return None

    def numberOfEntries(self, url, p):

        connection = urllib.urlopen(url + p)
        response = json.load(connection)
        connection.close()

        numFound = response['response']['numFound']

        #self.log(Level.INFO, "Number of Entries ==> " + str(numFound))
        return (numFound / 1000 + 1)
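
# Illustrative sketch, standalone: how the module above pages through Solr
# results 1000 documents at a time. The host, port, query parameters, and core
# name are placeholders; the real values come from the module settings and
# findCoreName().
import json
import urllib

def iter_solr_pages(core_name, num_found, host="localhost", port="23232"):
    base = "http://" + host + ":" + port + "/solr/" + core_name + "/select?"
    pages = num_found / 1000 + 1      # integer division, as in numberOfEntries()
    for page in range(0, pages + 1):
        params = "&".join(["q=text:*", "start=" + str(page * 1000),
                           "rows=" + str(1000), "wt=json"])
        connection = urllib.urlopen(base + params)
        response = json.load(connection)
        connection.close()
        yield response['response']['docs']
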
class GUI_TestIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(GUI_TestIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self.List_Of_GUI_Test = []

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        Combo_Box_entry = self.local_settings.getComboBox()
        self.log(Level.INFO, "Combo Box Entry Starts here =====>")
        self.log(Level.INFO, self.local_settings.getComboBox())
        self.log(Level.INFO, "<====== Combo Box Entry Ends here")

        list_box_entry = self.local_settings.getListBox()
        self.log(Level.INFO, "List Box Entry Starts here =====>")
        self.log(Level.INFO, str(list_box_entry))
        for num in range(0, len(list_box_entry)):
            self.log(Level.INFO, str(list_box_entry[num]))
        self.log(Level.INFO, "<====== List Box Entry Ends here")

        # Check to see if the file to import exists, if it does not then raise an exception and log error
        if self.local_settings.getImp_File_Flag():
            self.log(Level.INFO, self.local_settings.getFile_Imp_TF())
            self.path_to_import_file = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                self.local_settings.getFile_Imp_TF())
            if not os.path.exists(self.path_to_import_file):
                raise IngestModuleException("File to import is not available")

        if self.local_settings.getExec_Prog_Flag():
            self.log(Level.INFO, self.local_settings.getExecFile())
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                self.local_settings.getExecFile())
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "File to Run/execute does not exist.")

        #self.logger.logp(Level.INFO, GUI_TestWithUI.__name__, "startUp", str(self.List_Of_Events))
        #self.log(Level.INFO, str(self.List_Of_GUI_Test))

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See:x http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        self.log(
            Level.INFO,
            "Starting to process, Just before call to parse_safari_history")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        self.log(Level.INFO,
                 "Starting 2 to process, Just before call to ???????")
        self.log(Level.INFO, "ending process, Just before call to ??????")

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "GUI_Test",
                                              " GUI_Test Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
class CrashDumpIngestModule(FileIngestModule):

    _logger = Logger.getLogger(CrashDumpIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    # TODO: Add any setup code that you need here.
    def startUp(self, context):
        self.filesFound = 0

        skCase = Case.getCurrentCase().getSleuthkitCase()
        # Create the artifact type, if it exists then catch the error
        try:
            self.log(Level.INFO, "Begin Create New Artifact")
            artID_ns_cd = skCase.addArtifactType(
                "TSK_ART_NS_CD", "Nintendo Switch - Crash Dumps")
        except:
            self.log(Level.INFO, "Artifact Creation Error: NS - Crash Dumps")

        try:
            attID_ns_cd_apssid = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_APSSID',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Access Point SSID")
        except:
            self.log(Level.INFO, "Attribute Creation Error: AccessPointSSID")

        try:
            attID_ns_cd_apsec = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_APSEC',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Access Point Security Type")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: AccessPointSecurityType")

        try:
            attID_ns_cd_appt = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_APPT',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Application Title")
        except:
            self.log(Level.INFO, "Attribute Creation Error: ApplicationTitle")

        try:
            attID_ns_cd_batc = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_BATC',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Battery Charge Percent")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: BatteryChargePercent")

        try:
            attID_ns_cd_charge = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_CHARGE',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Charge Enabled")
        except:
            self.log(Level.INFO, "Attribute Creation Error: ChargeEnabled")

        try:
            attID_ns_cd_con = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_CON',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Connection Status")
        except:
            self.log(Level.INFO, "Attribute Creation Error: ConnectionStatus")

        try:
            attID_ns_cd_ip = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_IP',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "IP Address")
        except:
            self.log(Level.INFO, "Attribute Creation Error: CurrentIPAddress")

        try:
            attID_ns_cd_lang = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_LANG',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Language")
        except:
            self.log(Level.INFO, "Attribute Creation Error: CurrentLanguage")

        try:
            attID_ns_cd_cpower = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_CPOWER',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Current Power State")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: CurrentSystemPowerState")

        try:
            attID_ns_cd_dpower = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_DPOWER',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Destination Power State")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: DestinationSystemPowerState")

        try:
            attID_ns_cd_ltime = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_LTIME',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Time Since Launch")
        except:
            self.log(
                Level.INFO,
                "Attribute Creation Error: ElapsedTimeSinceInitialLaunch")

        try:
            attID_ns_cd_atime = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_ATIME',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Time Since Last Awake")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: ElapsedTimeSinceLastAwake")

        try:
            attID_ns_cd_ptime = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_PTIME',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Time Since Last Power On")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: ElapsedTimeSincePowerOn")

        try:
            attID_ns_cd_errc = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_ERRC',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Error Code")
        except:
            self.log(Level.INFO, "Attribute Creation Error: ErrorCode")

        try:
            attID_ns_cd_gip = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_GIP',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Gateway IP Address")
        except:
            self.log(Level.INFO, "Attribute Creation Error: GatewayIPAddress")

        try:
            attID_ns_cd_batn = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_BATN',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Internal Battery #")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: InternalBatteryLotNumber")

        try:
            attID_ns_cd_monh = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_MONH',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Monitor Height")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: MonitorCurrentHeight")

        try:
            attID_ns_cd_monw = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_MONW',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Monitor Width")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: MonitorCurrentWidth")

        try:
            attID_ns_cd_monm = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_MONM',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Monitor Manufacturer")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: MonitorManufactureCode")

        try:
            attID_ns_cd_mons = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_MONS',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Monitor Serial #")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: MonitorSerialNumber")

        try:
            attID_ns_cd_nfs = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_NFS',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "NAND Free Space")
        except:
            self.log(Level.INFO, "Attribute Creation Error: NANDFreeSpace")

        try:
            attID_ns_cd_nts = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_NTS',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "NAND Total Size")
        except:
            self.log(Level.INFO, "Attribute Creation Error: NANDTotalSize")

        try:
            attID_ns_cd_nxmac = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_NXMAC',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Device MAC Address")
        except:
            self.log(Level.INFO, "Attribute Creation Error: NXMacAddress")

        try:
            attID_ns_cd_ot = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_OT',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Occurrence Tick")
        except:
            self.log(Level.INFO, "Attribute Creation Error: OccurrenceTick")

        try:
            attID_ns_cd_ots = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_OTS',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Occurrence Timestamp")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: OccurrenceTimestamp")

        try:
            attID_ns_cd_osv = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_OSV',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Os Version")
        except:
            self.log(Level.INFO, "Attribute Creation Error: OsVersion")

        try:
            attID_ns_cd_dnsp = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_DNSP',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Priority DNS IP")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: PriorityDNSIPAddress")

        try:
            attID_ns_cd_region = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_REGION',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Device Region")
        except:
            self.log(Level.INFO, "Attribute Creation Error: RegionSetting")

        try:
            attID_ns_cd_rid = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_RID',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Crash Dump ID")
        except:
            self.log(Level.INFO, "Attribute Creation Error: ReportIdentifier")

        try:
            attID_ns_cd_rappt = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_RAPPT',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Running App Title")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: RunningApplicationTitle")

        try:
            attID_ns_cd_nxsn = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_NXSN',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Device Serial #")
        except:
            self.log(Level.INFO, "Attribute Creation Error: SerialNumber")

        try:
            attID_ns_cd_netm = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_NETM',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Subnet Mask")
        except:
            self.log(Level.INFO, "Attribute Creation Error: SubnetMask")

        try:
            attID_ns_cd_tz = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_TZ',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Time Zone")
        except:
            self.log(Level.INFO, "Attribute Creation Error: TimeZone")

        try:
            attID_ns_cd_vout = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_VOUT',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Video Output Setting")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: VideoOutputSetting")

        try:
            attID_ns_cd_apmac = skCase.addArtifactAttributeType(
                'TSK_ATT_CD_APMAC',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "AP MAC Address")
        except:
            self.log(Level.INFO,
                     "Attribute Creation Error: WirelessAPMacAddress")

        self.tmp_path = os.path.join(tempfile.gettempdir(),
                                     "switch_crash_dumps")
        self.hac_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "dependencies",
            "hactoolnet.exe")

        if not os.path.exists(self.tmp_path):
            os.mkdir(self.tmp_path)

        if not os.path.exists(self.hac_path):
            raise IngestModuleException(
                "hactoolnet.exe was not found in module folder")

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass

    # Where the analysis is done.  Each file will be passed into here.
    # The 'file' object being passed in is of type org.sleuthkit.datamodel.AbstractFile.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/classorg_1_1sleuthkit_1_1datamodel_1_1_abstract_file.html
    # TODO: Add your analysis code in here.
    def process(self, file):

        skCase = Case.getCurrentCase().getSleuthkitCase()

        artID_ns_cd = skCase.getArtifactType("TSK_ART_NS_CD")
        artID_ns_cd_id = skCase.getArtifactTypeID("TSK_ART_NS_CD")

        attID_ns_cd_apssid = skCase.getAttributeType("TSK_ATT_CD_APSSID")
        attID_ns_cd_apsec = skCase.getAttributeType("TSK_ATT_CD_APSEC")
        attID_ns_cd_appt = skCase.getAttributeType("TSK_ATT_CD_APPT")
        attID_ns_cd_batc = skCase.getAttributeType("TSK_ATT_CD_BATC")
        attID_ns_cd_charge = skCase.getAttributeType("TSK_ATT_CD_CHARGE")
        attID_ns_cd_con = skCase.getAttributeType("TSK_ATT_CD_CON")
        attID_ns_cd_ip = skCase.getAttributeType("TSK_ATT_CD_IP")
        attID_ns_cd_lang = skCase.getAttributeType("TSK_ATT_CD_LANG")
        attID_ns_cd_cpower = skCase.getAttributeType("TSK_ATT_CD_CPOWER")
        attID_ns_cd_dpower = skCase.getAttributeType("TSK_ATT_CD_DPOWER")
        attID_ns_cd_ltime = skCase.getAttributeType("TSK_ATT_CD_LTIME")
        attID_ns_cd_atime = skCase.getAttributeType("TSK_ATT_CD_ATIME")
        attID_ns_cd_ptime = skCase.getAttributeType("TSK_ATT_CD_PTIME")
        attID_ns_cd_errc = skCase.getAttributeType("TSK_ATT_CD_ERRC")
        attID_ns_cd_gip = skCase.getAttributeType("TSK_ATT_CD_GIP")
        attID_ns_cd_batn = skCase.getAttributeType("TSK_ATT_CD_BATN")
        attID_ns_cd_monh = skCase.getAttributeType("TSK_ATT_CD_MONH")
        attID_ns_cd_monw = skCase.getAttributeType("TSK_ATT_CD_MONW")
        attID_ns_cd_monm = skCase.getAttributeType("TSK_ATT_CD_MONM")
        attID_ns_cd_mons = skCase.getAttributeType("TSK_ATT_CD_MONS")
        attID_ns_cd_nfs = skCase.getAttributeType("TSK_ATT_CD_NFS")
        attID_ns_cd_nts = skCase.getAttributeType("TSK_ATT_CD_NTS")
        attID_ns_cd_nxmac = skCase.getAttributeType("TSK_ATT_CD_NXMAC")
        attID_ns_cd_ot = skCase.getAttributeType("TSK_ATT_CD_OT")
        attID_ns_cd_ots = skCase.getAttributeType("TSK_ATT_CD_OTS")
        attID_ns_cd_osv = skCase.getAttributeType("TSK_ATT_CD_OSV")
        attID_ns_cd_dnsp = skCase.getAttributeType("TSK_ATT_CD_DNSP")
        attID_ns_cd_region = skCase.getAttributeType("TSK_ATT_CD_REGION")
        attID_ns_cd_rid = skCase.getAttributeType("TSK_ATT_CD_RID")
        attID_ns_cd_rappt = skCase.getAttributeType("TSK_ATT_CD_RAPPT")
        attID_ns_cd_nxsn = skCase.getAttributeType("TSK_ATT_CD_NXSN")
        attID_ns_cd_netm = skCase.getAttributeType("TSK_ATT_CD_NETM")
        attID_ns_cd_tz = skCase.getAttributeType("TSK_ATT_CD_TZ")
        attID_ns_cd_vout = skCase.getAttributeType("TSK_ATT_CD_VOUT")
        attID_ns_cd_apmac = skCase.getAttributeType("TSK_ATT_CD_APMAC")

        # Skip non-files
        if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
                or
            (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
                or (file.isFile() == False)):
            return IngestModule.ProcessResult.OK

        if (file.getParentPath().upper() == "/SAVE/"
                and file.getName().upper() == "80000000000000D1"):

            self.log(Level.INFO, "Found crash dump save")
            self.filesFound += 1

            buf = zeros(file.getSize(), 'b')
            file.read(buf, 0, file.getSize())

            tmp_file_path = os.path.join(self.tmp_path, file.getName())

            with open(tmp_file_path, 'wb+') as tmp_file:
                tmp_file.write(buf)

            hac_cmd = [
                self.hac_path, "-t", "save", "--outdir", self.tmp_path,
                tmp_file_path
            ]
            subprocess.call(hac_cmd)

            crash_files = [
                os.path.join(self.tmp_path, f)
                for f in os.listdir(self.tmp_path)
                if os.path.isfile(os.path.join(self.tmp_path, f)) and re.match(
                    r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
                    f)
            ]
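            # Each matched file is named with a GUID and (per the unpack below)
            # holds a msgpack-encoded dictionary of crash-report metadata.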

            for msgpack_file in crash_files:
                with open(msgpack_file, "rb") as infile:
                    data = msgpack.unpack(infile)

                # Don't add to blackboard if already exists
                artifactList = file.getArtifacts(artID_ns_cd_id)
                cd_ids = []
                for artifact in artifactList:
                    cd_ids.append(
                        artifact.getAttribute(
                            attID_ns_cd_rid).getValueString())
                if data["ReportIdentifier"] in cd_ids:
                    return IngestModule.ProcessResult.OK

                art = file.newArtifact(artID_ns_cd_id)

                art.addAttribute(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.
                        getTypeID(), CrashDumpIngestModuleFactory.moduleName,
                        "Nintendo Switch - Crash Dumps"))
                if "AccessPointSSID" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_apssid,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["AccessPointSSID"])))
                if "AccessPointSecurityType" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_apsec,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["AccessPointSecurityType"])))
                if "ApplicationTitle" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_appt,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ApplicationTitle"])))
                if "BatteryChargePercent" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_batc,
                            CrashDumpIngestModuleFactory.moduleName, "%.2f%%" %
                            (data["BatteryChargePercent"] / 1000.0)))
                if "ChargeEnabled" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_charge,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ChargeEnabled"])))
                if "ConnectionStatus" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_con,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ConnectionStatus"])))
                if "CurrentIPAddress" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_ip,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["CurrentIPAddress"])))
                if "CurrentLanguage" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_lang,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["CurrentLanguage"])))
                if "CurrentSystemPowerState" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_cpower,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["CurrentSystemPowerState"])))
                if "DestinationSystemPowerState" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_dpower,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["DestinationSystemPowerState"])))
                if "ElapsedTimeSinceInitialLaunch" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_ltime,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ElapsedTimeSinceInitialLaunch"])))
                if "ElapsedTimeSinceLastAwake" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_atime,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ElapsedTimeSinceLastAwake"])))
                if "ElapsedTimeSincePowerOn" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_ptime,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ElapsedTimeSincePowerOn"])))
                if "ErrorCode" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_errc,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ErrorCode"])))
                if "GatewayIPAddress" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_gip,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["GatewayIPAddress"])))
                if "InternalBatteryLotNumber" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_batn,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["InternalBatteryLotNumber"])))
                if "MonitorCurrentHeight" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_monh,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["MonitorCurrentHeight"])))
                if "MonitorCurrentWidth" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_monw,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["MonitorCurrentWidth"])))
                if "MonitorManufactureCode" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_monm,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["MonitorManufactureCode"])))
                if "MonitorSerialNumber" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_mons,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["MonitorSerialNumber"])))
                if "NANDFreeSpace" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_nfs,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["NANDFreeSpace"])))
                if "NANDTotalSize" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_nts,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["NANDTotalSize"])))
                if "NXMacAddress" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_nxmac,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["NXMacAddress"])))
                if "OccurrenceTick" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_ot,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["OccurrenceTick"])))
                if "OccurrenceTimestamp" in data:
                    OccurrenceTimestamp = datetime.fromtimestamp(
                        data["OccurrenceTimestamp"]).strftime('%H:%M %d/%m/%Y')
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_ots,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(OccurrenceTimestamp)))
                if "OsVersion" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_osv,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["OsVersion"])))
                if "PriorityDNSIPAddress" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_dnsp,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["PriorityDNSIPAddress"])))
                if "RegionSetting" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_region,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["RegionSetting"])))
                if "ReportIdentifier" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_rid,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["ReportIdentifier"])))
                if "RunningApplicationTitle" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_rappt,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["RunningApplicationTitle"])))
                if "SerialNumber" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_nxsn,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["SerialNumber"])))
                if "SubnetMask" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_netm,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["SubnetMask"])))
                if "TimeZone" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_tz,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["TimeZone"])))
                if "VideoOutputSetting" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_vout,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["VideoOutputSetting"])))
                if "WirelessAPMacAddress" in data:
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ns_cd_apmac,
                            CrashDumpIngestModuleFactory.moduleName,
                            str(data["WirelessAPMacAddress"])))
                # Fire an event to notify the UI and others that there is a new artifact
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(CrashDumpIngestModuleFactory.moduleName,
                                    artID_ns_cd, None))

            return IngestModule.ProcessResult.OK
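
    # The long chain of "if <key> in data: art.addAttribute(...)" calls above can be
    # driven by a single mapping from crash-dump keys to attribute type IDs. A minimal
    # sketch, assuming the attID_ns_cd_* values created at startup were also collected
    # into a hypothetical dict such as self.attIDsByKey = {"GatewayIPAddress": attID_ns_cd_gip, ...}:
    def addMappedAttributes(self, art, data):
        for key, attID in self.attIDsByKey.items():
            if key in data:
                art.addAttribute(
                    BlackboardAttribute(attID,
                                        CrashDumpIngestModuleFactory.moduleName,
                                        str(data[key])))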

    # Where any shutdown code is run and resources are freed.
    # TODO: Add any shutdown code that you need here.
    def shutDown(self):
        # As a final part of this example, we'll send a message to the ingest inbox with the number of files found (in this thread)
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA,
            CrashDumpIngestModuleFactory.moduleName,
            str(self.filesFound) + " crash dumps found")
        IngestServices.getInstance().postMessage(message)

        # remove temp dir after use
        if os.path.exists(self.tmp_path):
            shutil.rmtree(self.tmp_path)
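
        # Note: on Windows shutil.rmtree can fail if something in self.tmp_path is
        # still locked; a defensive variant (sketch) would tolerate that, e.g.
        #     shutil.rmtree(self.tmp_path, ignore_errors=True)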
Example #17
    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._PACKAGE_NAME = "com.dewmobile.kuaiya.play"
        self._MODULE_NAME = "Zapya Analyzer"
        self._MESSAGE_TYPE = "Zapya Message"
        self._VERSION = "5.8.3"
class QNX6ReaderIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(QNX6ReaderIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/latest/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        # Advance the progress bar in Autopsy
        progressBar.switchToDeterminate(100)
        progressBar.progress("Parsing Super Block", 10)
        case = Case.getCurrentCase()
        sKCase = case.getSleuthkitCase()
        wDirPath = case.getModuleDirectory()

        # Directory into which the recovered data will be extracted
        realRootDir = wDirPath + "\\" + dataSource.getName() + "\\Partition0"
        if not os.path.exists(realRootDir):
            try:
                os.makedirs(realRootDir)
            except OSError as e:
                pass

        # Get the disk image as an AbstractFile
        fileManager = case.getServices().getFileManager()
        qnx6Img = fileManager.findFiles(dataSource, "%%")[0]

        # Build the QNX6_FS object used to read the superblock information, etc.
        qnx6fs = QNX6_FS(qnx6Img, self._logger)

        # The space occupied by a partition, if any, should be taken into account here
        SBoffset = 0 + qnx6fs.QNX6_BOOTBLOCK_SIZE
        # Read the information from the first superblock
        SB = qnx6fs.readSuperBlock(SBoffset)
        # The end of superblock 1 is used to compute addresses from the pointers stored in the inodes
        SBendOffset = SB["SB_end"]
        # If this really is a QNX6 file system
        if (qnx6fs.isQNX6FS(SB)):
            self.postMessage("QNX6 file system detected")

            # Create a report containing the superblock information
            self.createAndPostSBReport(dataSource.getName(),
                                       wDirPath + "\\..\\Reports", SB)
            self.postMessage("File System report created")

            # Identify the active superblock (the superblock with the highest serial number is the active one)
            # The other one is the backup superblock, which can be useful for recovering deleted data
            sndSBOffset = qnx6fs.getSndSPBlockOffset(SB)
            sndSB = qnx6fs.readSuperBlock(sndSBOffset)
            backUpSB = sndSB
            if (qnx6fs.isQNX6FS(sndSB)):
                if (sndSB['serialNum'] > SB['serialNum']):
                    backUpSB = SB
                    SB = sndSB

            # Get the inodes from the root nodes of the active superblock
            progressBar.progress("Parsing inodes", 20)
            inodeTree = qnx6fs.getInodesFromRootNodes(SB["RootNode"]['ptr'],
                                                      SB["tailleBlock"],
                                                      SBendOffset,
                                                      SB['RootNode']['level'])

            # Get the inodes from the root nodes of the backup superblock (useful for recovering deleted data)
            backUpInodeTree = qnx6fs.getInodesFromRootNodes(
                backUpSB["RootNode"]['ptr'], backUpSB["tailleBlock"],
                SBendOffset, backUpSB['RootNode']['level'])

            # Get the inodes of files with long names (handled separately)
            longNameObj = qnx6fs.getLongFileNames(SB)

            # Build dirTree, a dictionary mapping directory and file IDs to their names and their parents' IDs
            progressBar.progress("Parsing directory structure", 65)
            dirTree = qnx6fs.getDirTree(inodeTree, backUpInodeTree,
                                        longNameObj, SB['tailleBlock'],
                                        SBendOffset)

            # Log the inodes
            self.log(Level.INFO, str(inodeTree))
            self.log(Level.INFO, "\n\n\n\n\n\n\n\n\n\n\n")
            self.log(Level.INFO, str(backUpInodeTree))
            self.log(Level.INFO, str(dirTree))

            # Get the list of files and directories with all associated information
            progressBar.progress("Files and dirs recovery from inodes", 80)
            dirList, fileList = qnx6fs.getDirsAndFiles(inodeTree, dirTree,
                                                       SB['tailleBlock'],
                                                       SBendOffset)

            # Create a dedicated directory for recovered deleted files whose path and name could not be determined
            retrivedContentDirName = "retrieved_content//"
            dirPath = realRootDir + "\\" + retrivedContentDirName
            if (not os.path.exists(dirPath)):
                try:
                    os.makedirs(dirPath)
                except OSError as e:
                    self.postMessage("Erreur lors de la creation de : " +
                                     dirPath)
            # Recover deleted files whose path and name could not be determined
            deletedContent = qnx6fs.getDeletedContent(retrivedContentDirName,
                                                      inodeTree, dirTree,
                                                      SB['tailleBlock'],
                                                      SBendOffset)

            # Recreate the recovered directories under the extraction directory
            progressBar.progress("Creation of recovered files and dirs", 90)
            for rep in dirList:
                dirPath = realRootDir + "\\" + os.path.join(
                    rep["path"], rep["name"])
                if (not os.path.exists(dirPath)):
                    try:
                        os.makedirs(dirPath)
                    except OSError as e:
                        self.postMessage("Erreur lors de la creation de : " +
                                         dirPath)
                        self.log(Level.INFO, os.strerror(e.errno))
                        pass

            # Recreate the recovered files under the extraction directory
            for file in fileList + deletedContent:
                filePath = realRootDir + "\\" + os.path.join(
                    file["path"], file["name"])
                if (not os.path.exists(filePath)):
                    try:
                        f = open(filePath, "wb+")
                        if (file["data"] != None):
                            f.write(file["data"])
                        f.close()
                    except IOError as e:
                        self.postMessage("Erreur lors de la creation de : " +
                                         filePath)
                        self.log(Level.INFO, os.strerror(e.errno))
                        pass

            progressBar.progress("Creation of reports", 95)
            self.postMessage("Files extracted in " + realRootDir)

            # Build the tree in Autopsy from the data source and the recovered data
            virtualRootDir = Case.getCurrentCase().getSleuthkitCase(
            ).addLocalDirectory(dataSource.getId(), "Partition" + str(0))
            self.addTree(realRootDir, virtualRootDir)

            # Create the report containing all the extracted information
            self.createAndPostContentReport(dataSource.getName(),
                                            wDirPath + "\\..\\Reports",
                                            dirList, fileList + deletedContent)
        else:
            self.postMessage("No QNX6 file system detected")

        # Notify Autopsy that items have been added to the data source
        Case.getCurrentCase().notifyDataSourceAdded(dataSource,
                                                    UUID.randomUUID())
        progressBar.progress("Task completed", 100)
        return IngestModule.ProcessResult.OK

    # Add the contents of a directory to the Autopsy data source
    def addTree(self, path, parent):
        sCase = Case.getCurrentCase().getSleuthkitCase()
        for f in os.listdir(path):
            fpath = os.path.join(path, f)
            if os.path.isfile(fpath):
                sCase.addLocalFile(f, fpath, os.path.getsize(fpath),
                                   long(os.path.getctime(fpath)),
                                   long(os.path.getctime(fpath)),
                                   long(os.path.getatime(fpath)),
                                   long(os.path.getmtime(fpath)), True, parent)
            if os.path.isdir(fpath):
                rep = sCase.addLocalFile(f, fpath, os.path.getsize(fpath),
                                         long(os.path.getctime(fpath)),
                                         long(os.path.getctime(fpath)),
                                         long(os.path.getatime(fpath)),
                                         long(os.path.getmtime(fpath)), False,
                                         parent)
                self.addTree(fpath, rep)

    # Find the directory named dirName in the data source
    def findAutopsyDir(self, dirName):
        for autopsyDir in self.autopsyLocalDirList:
            if (dirName == autopsyDir.getName()):
                return autopsyDir
        return self.autopsyLocalDirList[0]

    # Check whether an Autopsy directory named dirName exists
    def dirNameIsAutopsyDir(self, dirName):
        for autopsyDir in self.autopsyLocalDirList:
            if (dirName == autopsyDir.getName()):
                return True
        return False

    # Post a message to the Autopsy ingest inbox
    def postMessage(self, message):
        IngestServices.getInstance().postMessage(
            IngestMessage.createMessage(
                IngestMessage.MessageType.DATA,
                QNX6ReaderIngestModuleFactory.moduleName, message))

    # Create a report listing the extracted files and directories
    def createAndPostContentReport(self, name, path, dirList, fileList):
        filename = name + "ContentReport.txt"
        if not os.path.exists(path):
            os.makedirs(path)
        filePath = os.path.join(path, filename)
        report = open(filePath, 'wb+')
        report.write("------" + name + " QNX6FS Content Report------\n")

        report.write("\n\n------Directories Extracted------\n")
        for rep in dirList:
            report.write("Path : " + rep['path'] + "  |  Name : " +
                         rep['name'] + "  |  Size : " + str(rep['size']) +
                         "  |  UID : " + str(rep['uid']) + "  |  GID : " +
                         str(rep['gid']) + "  |  ftime : " +
                         datetime.fromtimestamp(int(rep['ftime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  atime : " +
                         datetime.fromtimestamp(int(rep['atime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  ctime : " +
                         datetime.fromtimestamp(int(rep['ctime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  mtime : " +
                         datetime.fromtimestamp(int(rep['mtime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  status : " +
                         str(rep['status']) + "\n")

        report.write("\n\n------Files Extracted------\n")
        for file in fileList:
            report.write("Path : " + file['path'] + "  |  Name : " +
                         file['name'] + "  |  Size : " + str(file['size']) +
                         "  |  UID : " + str(file['uid']) + "  |  GID : " +
                         str(file['gid']) + "  |  ftime : " +
                         datetime.fromtimestamp(int(file['ftime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  atime : " +
                         datetime.fromtimestamp(int(file['atime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  ctime : " +
                         datetime.fromtimestamp(int(file['ctime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  mtime : " +
                         datetime.fromtimestamp(int(file['mtime'])).strftime(
                             "%m/%d/%Y, %H:%M:%S") + "  |  status : " +
                         str(file['status']) + "\n")
        report.close()
        # Add the report to the Case, so it is shown in the tree
        Case.getCurrentCase().addReport(
            filePath, QNX6ReaderIngestModuleFactory.moduleName,
            name + " Content report")

    # Create a report containing the superblock information
    def createAndPostSBReport(self, name, path, SB):
        filename = name + "SuperBlockReport.txt"
        if not os.path.exists(path):
            os.makedirs(path)
        filePath = os.path.join(path, filename)
        report = open(filePath, 'wb+')
        report.write("------" + name +
                     " QNX6FS Super Block informations------\n\n")
        report.write("Serial number : " + hex(int(SB["serialNum"])) + "\n")
        report.write("Magic number : " + hex(int(SB["magic"])) + "\n")
        report.write("File system creation time :  " + datetime.fromtimestamp(
            int(SB['ctime'])).strftime("%m/%d/%Y, %H:%M:%S") + "\n")
        report.write("File system modification time :  " +
                     datetime.fromtimestamp(int(SB['ctime'])).strftime(
                         "%m/%d/%Y, %H:%M:%S") + "\n")
        report.write("File system access time :  " + datetime.fromtimestamp(
            int(SB['ctime'])).strftime("%m/%d/%Y, %H:%M:%S") + "\n")
        report.write("Block Size : " + str(int(SB["tailleBlock"])) +
                     " bytes \n")
        report.write("Number of blocks : " + hex(int(SB["nbBlocks"])) + "\n")
        report.write("Number of free blocks : " +
                     hex(int(SB["nbBlocksLibres"])) + "\n")
        report.write("Number of inodes : " + hex(int(SB["nbInodes"])) + "\n")
        report.write("Number of free inodes : " +
                     hex(int(SB["nbInodesLibres"])) + "\n")
        report.close()

        # Add the report to the Case, so it is shown in the tree
        Case.getCurrentCase().addReport(
            filePath, QNX6ReaderIngestModuleFactory.moduleName,
            name + " Super Block Report")
class ThumbsIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ThumbsIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
 
    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        #Show parameters that are passed in
        self.path_to_exe_thumbs = os.path.join(os.path.dirname(os.path.abspath(__file__)), "thumbs_viewer.exe")
        if not os.path.exists(self.path_to_exe_thumbs):
            raise IngestModuleException("Thumbs_viewer File to Run/execute does not exist.")

     
        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()
        
        # Find all thumbs.db files in the data source
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        thumb_files = fileManager.findFiles(dataSource, "thumbs.db", "")
        numFiles = len(thumb_files)
        self.log(Level.INFO, "Number of Thumbs.db files found ==> " + str(numFiles))
        
        # Create a Thumbs.db directory under the module output directory; if it already exists, continue processing
        Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        tmp_dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "\\Thumbs.db")
        except:
            self.log(Level.INFO, "Thumbs.db Directory already exists " + Temp_Dir)

        for thumb_file in thumb_files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            self.log(Level.INFO, "Processing file: " + thumb_file.getName())
            #fileCount += 1
    
            out_dir = os.path.join(Temp_Dir + "\\Thumbs.db", str(thumb_file.getId()) + "-" + thumb_file.getName())
            try:
                os.mkdir(out_dir)
            except:
                self.log(Level.INFO, str(thumb_file.getId()) + "-" + thumb_file.getName() + " Directory already exists " + Temp_Dir)


            # Save the thumbs.db locally in the case temp folder; use the file id in the name to reduce collisions
            lclDbPath = os.path.join(tmp_dir, str(thumb_file.getId()) + "-" + thumb_file.getName())
            ContentUtils.writeToFile(thumb_file, File(lclDbPath))

            # Run thumbs_viewer against the selected Database
            self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_thumbs + " -O " + out_dir + " " + lclDbPath)
            pipe = Popen([self.path_to_exe_thumbs, "-O", out_dir, lclDbPath], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)               
        
            # Get the parent abstract file Information
            abstract_file_info = skCase.getAbstractFileById(thumb_file.getId())
            #self.log(Level.INFO, "Abstract File Info ==> " + str(abstract_file_info))
        
            files = next(os.walk(out_dir))[2]
            for file in files:
                self.log(Level.INFO, " File Name is ==> " + file)
                
                dev_file = os.path.join(out_dir, file)
                local_file = os.path.join("ModuleOutput\\thumbs.db\\" + str(thumb_file.getId()) + "-" + thumb_file.getName(), file)
                self.log(Level.INFO, " Dev File Name is ==> " + dev_file)
                self.log(Level.INFO, " Local File Name is ==> " + local_file)
                
                if not(self.check_dervived_existance(dataSource, file, abstract_file_info)):

                    # Add derived file
                    # Parameters are:
                    #    File Name, Local Path, size, ctime, crtime, atime, mtime, isFile, Parent File, rederive Details, Tool Name,
                    #    Tool Version, Other Details, Encoding Type
                    dervived_file = skCase.addDerivedFile(file, local_file, os.path.getsize(dev_file),
                                             0, 0, 0, 0, True, abstract_file_info, "", "thumb_viewer", "1.0.2.6", "", TskData.EncodingType.NONE)
                    #self.log(Level.INFO, "Derived File ==> " + str(dervived_file))
        
        
            try:
              os.remove(lclDbPath)
            except:
              self.log(Level.INFO, "removal of thumbs.db file " + lclDbPath + " failed " )

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
            "Thumbs.db", " Thumbs.db Files Have Been Analyzed " )
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK                
		
         

    def check_dervived_existance(self, dataSource, file_name, parent_file_abstract):

        fileManager = Case.getCurrentCase().getServices().getFileManager()
        dervived_file = fileManager.findFiles(dataSource, file_name, parent_file_abstract)
        numFiles = len(dervived_file)
    
        # Return True only if a derived file with this name already exists under the parent
        if numFiles == 0:
            return False
        else:
            return True
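
    # The process() method above builds output paths by concatenating "\\" by hand;
    # a hypothetical helper using os.path.join keeps the per-file output directory
    # construction in one place (sketch, same resulting path on Windows):
    def build_out_dir(self, base_dir, abstract_file):
        return os.path.join(base_dir, "Thumbs.db",
                            str(abstract_file.getId()) + "-" + abstract_file.getName())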
Example #20
class AndroidGeodataCrawler(FileIngestModule):

    _logger = Logger.getLogger(AndroidGeodataCrawlerFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)

    # Autopsy will pass in the settings from the UI panel
    def __init__(self, settings):
        self.local_settings = settings

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        # Determine if user configured a flag in UI
        if self.local_settings.getFlag():
            self.stanpl = True
        else:
            self.stanpl = False

        # Counters
        self.jobId = context.getJobId()
        self.filesFound = 0
        self.dbFound = 0
        self.picFound = 0
        self.jsonFound = 0

        self.lastFile_rep = ''
        self.el_rep = None

        self.lastFile = ''
        self.el = None

        # Inits the xml element
        self.root = et.Element("androidgeodata")
        self.root_report = et.Element("report")
        # File where the xml is stored
        self.xmlname = os.path.join( Case.getCurrentCase().getReportDirectory(), Case.getCurrentCase().getName()+"_"+str(self.jobId))

        # Checks whether the JSON file exists, if not the module doesn't run
        path_to_dict = os.path.dirname(os.path.abspath(__file__)) + '/dictionary.json'
        if not os.path.exists(path_to_dict):
            raise IngestModuleException("The dictionary file was not found in module folder")
        else:
            try:
                self.dict = json.load( open(path_to_dict) )
            except:
                raise IngestModuleException("The dictionary file was not loaded")

    # Where the analysis is done.  Each file will be passed into here.
    # The 'file' object being passed in is of type org.sleuthkit.datamodel.AbstractFile.
    def process(self, file):

        ####################
        # Proprietary code #
        ####################

        # Functions
        def getBlackboardAtt(label, value):
            return BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.fromLabel(label).getTypeID(),
                                       AndroidGeodataCrawlerFactory.moduleName, value )
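        # Example usage (hypothetical values): getBlackboardAtt("TSK_GEO_LATITUDE", 48.85)
        # resolves the attribute type from its label and tags it with this module's name.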

        # Use blackboard class to index blackboard artifacts for keyword search
        blackboard = Case.getCurrentCase().getServices().getBlackboard()

        # Skip non-files
        if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
                (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
                (file.isFile() == False)):
            return IngestModule.ProcessResult.OK

        # Counts files found
        self.filesFound += 1

        # Inits variables
        data = []
        ext = file.getNameExtension()

        # Supported picture extensions
        if ext in ("jpg","jpeg","png"):
            ext = "pic"

        # Supported db extensions
        if ext in ("db","sqlite","sqlite3","db3"):
            ext = "db"

        # Analyse only the files:
        if ext in ("", "db", "pic", "json", "txt", "log"):
            # Handles the file
            handler = FileHandler(file, file.getNameExtension(), file.getName(), file.getUniquePath(), file.getId(), self.stanpl )

            # Stores the file
            if handler.store_file(Case.getCurrentCase().getTempDirectory()):
                # Bool value to check whether the file has been analysed to stop other controls
                bool = True

                # # # # # # # # # # # # # # # # # # # # # # # # # # #  #
                #                                                      #
                # At the moment supported files are: db, pic and json. #
                #                                                      #
                # # # # # # # # # # # # # # # # # # # # # # # # # # #  #

                # If db
                if ext in ("db",""):

                    self.dbFound += 1

                    if ext == "db":
                        bool = False

                    # Tries a connection to verify whether the file is a db
                    if handler.connect() and not (util.findValue( handler.getName(), self.dict, "dict_db")):
                        bool = False

                        tables = handler.getTables()
                        if tables:
                            for table in tables:
                                resultSet = handler.query(table)
                                try:
                                    resultSetMetaData = resultSet.getMetaData()
                                    numColumns = resultSetMetaData.getColumnCount()
                                except:
                                    resultSetMetaData = None
                                    numColumns = None

                                if (resultSet and resultSetMetaData and numColumns):
                                    rows = []
                                    while resultSet.next():
                                        attributes = {}

                                        columns = range(1, numColumns + 1 )

                                        for column in columns:

                                            try:
                                                nameColumn = resultSetMetaData.getColumnName(column)
                                            except:
                                                nameColumn = None

                                            if nameColumn:
                                                temp = handler.processDB(resultSet, column, nameColumn, self.dict)
                                                if temp:
                                                    if temp[0] == "single":
                                                        attributes[temp[1]] = temp[2]
                                                        attributes["name"] = handler.getName()
                                                        attributes["type"] = "db"
                                                        attributes["table"] = table
                                                        attributes["path"] = handler.getPath()
                                                        if temp[1] in ("latitude", "longitude", "datetime", "text"):
                                                            attributes["column_"+temp[1]] = nameColumn
                                                        else:
                                                            if not "column_other" in attributes:
                                                                attributes["column_other"] = []
                                                            attributes["column_other"].append(nameColumn)

                                                    if temp[0] == "multiple":
                                                        if temp[1]:
                                                            for x in temp[1]:
                                                                x["name"] = handler.getName()
                                                                x["table"] = table
                                                                x["type"] = "db"
                                                                x["path"] = handler.getPath()
                                                                x["column"] = nameColumn
                                                            rows = rows + temp[1]

                                        if attributes:
                                            rows.append(attributes)

                                    if rows:
                                        data = data + rows

                            handler.close()

                # the file is not a db, is it a pic then?
                if ext in ("pic", "") and bool:
                    self.picFound += 1

                    if ext == "pic":
                        bool = False

                    res = handler.processPic()
                    if res:
                        bool = False
                        res["name"] = handler.getName()
                        res["path"] = handler.getPath()
                        res["type"] = "pic"
                        res["description"] = "from pic"
                        data.append(res)

                # The file is not a pic either, is it a file json?
                if ext in ("json","") and bool:
                    self.jsonFound += 1

                    if ext == "json":
                        bool = False

                    res = handler.processJsonFile()
                    if res:
                        bool = False
                        for x in res:
                            x["name"] = handler.getName()
                            x["path"] = handler.getPath()
                            x["type"] = "json"
                            x["description"] = "from file json"

                        data += res

                if bool and self.stanpl:
                    res = handler.processFile()
                    if res:
                        res["name"] = handler.getName()
                        res["path"] = handler.getPath()
                        res["type"] = "file"
                        res["description"] = "from file"

                        data.append(res)

                # Deletes the file temporarily stored
                e = handler.delete_file()
                if e:
                    self.log(Level.INFO, "Error in deleting the file "+handler.getName()+", message = "+e)

        if data:
            el = None
            el_rep = None
            for item in data:
                if "latitude" and "longitude" in item:

                    if not el:
                        # No element
                        el = et.SubElement(self.root, item["type"])
                        et.SubElement(el, "app").text = item["path"]
                        et.SubElement(el, "path").text = item["path"]
                        et.SubElement(el, "name").text = item["name"]
                        if "table" in item:
                            tables = et.SubElement(el, "tables")
                            table = et.SubElement(tables, "table", name=item["table"])
                            if "column" in item:
                                et.SubElement(table,"column", type="json").text = item["column"]
                            else:
                                et.SubElement(table,"column", type="latitude").text = item["column_latitude"]
                                et.SubElement(table,"column", type="longitude").text = item["column_longitude"]
                                if "column_datetime" in item:
                                    et.SubElement(table,"column", type="datetime").text = item["column_datetime"]
                    else:
                        #Element already exists
                        if not any(table.get("name") == item["table"] for table in el.find("tables").iter("table") ):
                            #No table
                            table = et.SubElement(el.find("tables"), "table", name=item["table"])
                            if "column" in item:
                                et.SubElement(table,"column", type="json").text = item["column"]
                            else:
                                et.SubElement(table,"column", type="latitude").text = item["column_latitude"]
                                et.SubElement(table,"column", type="longitude").text = item["column_longitude"]
                                if "column_datetime" in item:
                                    et.SubElement(table,"column", type="datetime").text = item["column_datetime"]

                    art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)

                    if "datetime" in item and item["datetime"] != "":
                        if isinstance(item["datetime"],str):
                            if el.tag == "pic":
                                att1 = getBlackboardAtt("TSK_DATETIME", timestamp.getTimestampFromPicDatetime(item["datetime"]))
                            else:
                                att1 = getBlackboardAtt("TSK_DATETIME", timestamp.getTimestampFromString(item["datetime"]))
                        else:
                            if len(str(item["datetime"])) == 10:
                                att1 = getBlackboardAtt("TSK_DATETIME",  item["datetime"])
                            elif len(str(item["datetime"])) == 13:
                                att1 = getBlackboardAtt("TSK_DATETIME",  int(item["datetime"]/1000))
                        art.addAttribute(att1)

                    att2 = getBlackboardAtt("TSK_GEO_LATITUDE", item["latitude"])

                    att3 = getBlackboardAtt("TSK_GEO_LONGITUDE", item["longitude"])

                    att4 = getBlackboardAtt("TSK_PROG_NAME", item["name"])

                    art.addAttributes([att2, att3, att4])

                    if "column" in item:
                        att5 = getBlackboardAtt("TSK_DESCRIPTION", "table: "+item["table"]+", column = "+item["column"])
                        art.addAttribute(att5)
                    elif "table" in item:
                        att5 = getBlackboardAtt("TSK_DESCRIPTION", "table: "+item["table"]+", column = "+item["column_latitude"]+", "+item["column_longitude"])
                        art.addAttribute(att5)
                    elif "description" in item:
                        att5 = getBlackboardAtt("TSK_DESCRIPTION", item["description"])
                        art.addAttribute(att5)

                    try:
                        # index the artifact for keyword search
                        blackboard.indexArtifact(art)
                    except Blackboard.BlackboardException:
                        self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

                if "text" in item:
                    if not el:
                        # No element
                        el = et.SubElement(self.root, item["type"])
                        et.SubElement(el, "app").text = item["path"] #.split("\/")[-2]
                        et.SubElement(el, "path").text = item["path"]
                        et.SubElement(el, "name").text = item["name"]
                        if "table" in item:
                            tables = et.SubElement(el, "tables")
                            table = et.SubElement(tables, "table", name=item["table"])
                            if "column_text" in item:
                                et.SubElement(table,"column", type="text").text = item["column_text"]
                    else:
                        #Element already exists
                        if not any(table.get("name") == item["table"] for table in el.find("tables").iter("table") ):
                            #No table
                            table = et.SubElement(el.find("tables"), "table", name=item["table"])
                            if "column_text" in item:
                                et.SubElement(table,"column", type="text").text = item["column_text"]


                    art_text = file.newArtifact(blackboard.getOrAddArtifactType("geodataTEXT","Geodata in text").getTypeID())
                    att = getBlackboardAtt("TSK_TEXT", item["text"] )
                    art_text.addAttribute(att)
                    if "column_text" and "table" in item:
                        att1 = getBlackboardAtt("TSK_DESCRIPTION", "table: "+item["table"]+", column = "+item["column_text"])
                        art_text.addAttribute(att1)
                    elif "description" in item:
                        att1 = getBlackboardAtt("TSK_DESCRIPTION", item["description"])
                        art_text.addAttribute(att1)


                    try:
                        # index the artifact for keyword search
                        blackboard.indexArtifact(art_text)
                    except Blackboard.BlackboardException:
                        self.log(Level.SEVERE, "Error indexing artifact " + art_text.getDisplayName())

                if "column_other" in item:
                    if not el_rep:
                        # No element
                        el_rep = et.SubElement(self.root_report, item["type"])
                        et.SubElement(el_rep, "app").text = item["path"]
                        et.SubElement(el_rep, "path").text = item["path"]
                        et.SubElement(el_rep, "name").text = item["name"]
                        tables = et.SubElement(el_rep, "tables")
                        table = et.SubElement(tables, "table", name=item["table"])
                        for column in item["column_other"]:
                            et.SubElement(table,"column", type="").text = column
                    else:
                        # Element already exists
                        t = True

                        for table in el_rep.find("tables").iter("table"):
                            if table.get("name") == item["table"]:
                                #Table already exists
                                t = False
                                for column in item["column_other"]:
                                    if not any(c.text == column for c in table.iter("column")):
                                        et.SubElement(table, "column").text = column
                                        #break
                        if t:
                            #No table
                            table = et.SubElement(el_rep.find("tables"), "table", name=item["table"])
                            for column in item["column_other"]:
                                et.SubElement(table,"column", type="").text = column

            # Fire an event to notify the UI and others that there is a new artifact
            IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(AndroidGeodataCrawlerFactory.moduleName,
            BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT, None))

        return IngestModule.ProcessResult.OK
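
    # The per-item bookkeeping above repeats the same four assignments (name, path,
    # type, description) for every handler result; a hypothetical helper would keep
    # that in one place (sketch):
    def _tagResult(self, handler, res, kind, description):
        res["name"] = handler.getName()
        res["path"] = handler.getPath()
        res["type"] = kind
        res["description"] = description
        return res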

    # Where any shutdown code is run and resources are freed.
    # TODO: Add any shutdown code that you need here.
    def shutDown(self):
        self.xmlname += "_"+str(self.picFound)+str(self.dbFound)+str(self.jsonFound)+str(self.filesFound)+"_androidgeodata.xml"

        report = open(self.xmlname, 'w')
        report.write(
            str(xml.dom.minidom.parseString(et.tostring(self.root)).toprettyxml())+
            " \n <!-- Report of possible other coordinates --> \n <!--"+
            str(xml.dom.minidom.parseString(et.tostring(self.root_report)).toprettyxml())
            +"-->" )
        report.close()
        Case.getCurrentCase().addReport(self.xmlname, AndroidGeodataCrawlerFactory.moduleName, "AndroidGeodata XML")
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, AndroidGeodataCrawlerFactory.moduleName,
             "In this thread: "+
            str(self.filesFound)+" files found, "+
            str(self.picFound)+" pictures, "+
            str(self.dbFound)+" DBs and "+
            str(self.jsonFound)+" json processed. "
            "\n A xml ("+self.xmlname+") and a report have been created ")
        IngestServices.getInstance().postMessage(message)
    def log(self, level, msg):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)
Example #22
class ParseSAMIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ParseSAMIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self.List_Of_Events = []

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        # Get path to EXE based on where this script is run from.
        # Assumes EXE is in same folder as script
        # Verify it is there before any ingest starts
        self.path_to_exe = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "samparse.exe")
        if not os.path.exists(self.path_to_exe):
            raise IngestModuleException("EXE was not found in module folder")

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Set the database to be read to the once created by the SAM parser program
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "SAM", "config")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create Event Log directory in temp directory, if it exists then continue on processing
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "\SAM")
        except:
            self.log(Level.INFO, "SAM Directory already exists " + Temp_Dir)

        # Write out each Event Log file to the temp directory
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(Temp_Dir + "\SAM", file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))

        # Example has only a Windows EXE, so bail if we aren't on Windows
        if not PlatformUtil.isWindowsOS():
            self.log(Level.INFO,
                     "Ignoring data source.  Not running on Windows")
            return IngestModule.ProcessResult.OK

        # Run the EXE, saving output to a sqlite database
        self.log(
            Level.INFO, "Running program on data source parm 1 ==> " +
            Temp_Dir + "  Parm 2 ==> " + Temp_Dir + "\\SAM.db3")
        subprocess.Popen([
            self.path_to_exe, Temp_Dir + "\\SAM\\SAM", Temp_Dir + "\\SAM.db3"
        ]).communicate()[0]

        for file in files:
            # Open the DB using JDBC
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                     "SAM.db3")
            #lclDbPath = "C:\\Users\\Forensic_User\\OneDrive\\Code\\Python_Scripts\\SRUDB\SRUDB.DB3"
            self.log(Level.INFO,
                     "Path the SAM database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                     lclDbPath)
            except SQLException as e:
                self.log(
                    Level.INFO, "Could not open database file (not SQLite) " +
                    file.getName() + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # PSlist => TSK_PROG_RUN
            # Query the SQLITE_MASTER table to get the names of all tables in the database.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery(
                    "Select tbl_name from SQLITE_MASTER; ")
                self.log(Level.INFO, "query SQLite Master table")
            except SQLException as e:
                self.log(
                    Level.INFO, "Error querying database for SAM table (" +
                    e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            try:
                self.log(Level.INFO, "Begin Create New Artifacts")
                artID_sam = skCase.addArtifactType("TSK_SAM", "SAM File")
            except:
                self.log(
                    Level.INFO,
                    "Artifacts Creation Error, some artifacts may not exist now. ==> "
                )

            artID_sam = skCase.getArtifactTypeID("TSK_SAM")
            artID_sam_evt = skCase.getArtifactType("TSK_SAM")

            # Cycle through each row and create artifacts
            while resultSet.next():
                try:
                    self.log(
                        Level.INFO,
                        "Result (" + resultSet.getString("tbl_name") + ")")
                    table_name = resultSet.getString("tbl_name")
                    #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                    SQL_String_1 = "Select * from " + table_name + ";"
                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                    #self.log(Level.INFO, SQL_String_1)
                    #self.log(Level.INFO, SQL_String_2)

                    Column_Names = []
                    Column_Types = []
                    resultSet2 = stmt.executeQuery(SQL_String_2)
                    while resultSet2.next():
                        Column_Names.append(
                            resultSet2.getString("name").upper())
                        Column_Types.append(resultSet2.getString("type"))
                        if resultSet2.getString("type") == "text":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType(
                                    "TSK_" +
                                    resultSet2.getString("name").upper(),
                                    BlackboardAttribute.
                                    TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                    resultSet2.getString("name"))
                                #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                            except:
                                self.log(
                                    Level.INFO, "Attributes Creation Error, " +
                                    resultSet2.getString("name") + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType(
                                    "TSK_" +
                                    resultSet2.getString("name").upper(),
                                    BlackboardAttribute.
                                    TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                    DATETIME, resultSet2.getString("name"))
                                #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                            except:
                                self.log(
                                    Level.INFO, "Attributes Creation Error, " +
                                    resultSet2.getString("name") + " ==> ")

                    resultSet3 = stmt.executeQuery(SQL_String_1)
                    while resultSet3.next():
                        art = file.newArtifact(artID_sam)
                        Column_Number = 1
                        for col_name in Column_Names:
                            #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                            #self.log(Level.INFO, "Result get information for column " + Column_Types[Column_Number - 1] + " ")
                            #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                            c_name = "TSK_" + col_name
                            #self.log(Level.INFO, "Attribute Name is " + c_name + " Atribute Type is " + str(Column_Types[Column_Number - 1]))
                            attID_ex1 = skCase.getAttributeType(c_name)
                            if Column_Types[Column_Number - 1] == "text":
                                art.addAttribute(
                                    BlackboardAttribute(
                                        attID_ex1,
                                        ParseSAMIngestModuleFactory.moduleName,
                                        resultSet3.getString(Column_Number)))
                            else:
                                art.addAttribute(
                                    BlackboardAttribute(
                                        attID_ex1,
                                        ParseSAMIngestModuleFactory.moduleName,
                                        resultSet3.getInt(Column_Number)))
                            Column_Number = Column_Number + 1

                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error getting values from contacts table (" +
                        e.getMessage() + ")")

            # Clean up
            stmt.close()
            dbConn.close()

#        os.remove(lclDbPath)

#Clean up EventLog directory and files
#        for file in files:
#            try:
#			    os.remove(Temp_Dir + "\\SAM\\" + file.getName())
#            except:
#			    self.log(Level.INFO, "removal of SAM file failed " + Temp_Dir + "\\" + file.getName())
#        try:
#             os.rmdir(Temp_Dir + "\\SAM")
#        except:
#		     self.log(Level.INFO, "removal of SAM directory failed " + Temp_Dir)

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "SAM Parser",
                                              " SAM Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseSAMIngestModuleFactory.moduleName,
                            artID_sam_evt, None))

        return IngestModule.ProcessResult.OK
class ParseFileHistoryIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ParseFileHistoryIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        # Get path to EXE based on where this script is run from.
        # Assumes EXE is in same folder as script
        # Verify it is there before any ingest starts
        self.path_to_exe = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "export_FileHistory.exe")
        if not os.path.exists(self.path_to_exe):
            raise IngestModuleException("EXE was not found in module folder")

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Check whether the custom artifact and attribute types already exist; create any that do not
        skCase = Case.getCurrentCase().getSleuthkitCase()

        # This will work in 4.0.1 and beyond
        # Use blackboard class to index blackboard artifacts for keyword search
        blackboard = Case.getCurrentCase().getServices().getBlackboard()

        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_cat1 = skCase.addArtifactType("TSK_FH_CATALOG_1",
                                                "File History Catalog 1")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, Catalog 1. ==> ")
            artID_cat1 = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_cat2 = skCase.addArtifactType("TSK_FH_CATALOG_2",
                                                "File History Catalog 2")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, Catalog 2. ==> ")
            artID_cat2 = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")

        # Create the attribute type, if it exists then catch the error
        try:
            attID_fh_pn = skCase.addArtifactAttributeType(
                'TSK_FH_PATH',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Parent Path")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Prefetch Parent Path. ==> ")

        try:
            attID_fh_fn = skCase.addArtifactAttributeType(
                'TSK_FH_FILE_NAME',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")

        try:
            attID_fh_fs = skCase.addArtifactAttributeType(
                'TSK_FH_FILE_SIZE',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "File Size")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")

        try:
            attID_fh_usn = skCase.addArtifactAttributeType(
                'TSK_FH_USN_JOURNAL_ENTRY',
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "USN Journal Entry")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, USN Journal Entry. ==> ")

        try:
            attID_fh_fc = skCase.addArtifactAttributeType(
                'TSK_FH_FILE_CREATED', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Created")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, File Created. ==> ")

        try:
            attID_fh_fm = skCase.addArtifactAttributeType(
                'TSK_FH_FILE_MODIFIED', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Modified")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 3. ==> ")

        try:
            attID_fh_bq = skCase.addArtifactAttributeType(
                'TSK_FH_BACKUP_QUEUED', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Queued")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Backup Queued ==> ")

        try:
            attID_fh_bc = skCase.addArtifactAttributeType(
                'TSK_FH_BACKUP_CREATED', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Created")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Backup Created ==> ")

        try:
            attID_fh_bcp = skCase.addArtifactAttributeType(
                'TSK_FH_BACKUP_CAPTURED', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
                "Backup Captured")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Backup Captured. ==> ")

        try:
            attID_fh_bu = skCase.addArtifactAttributeType(
                'TSK_FH_BACKUP_UPDATED', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Updated")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Backup Updated. ==> ")

        try:
            attID_fh_bv = skCase.addArtifactAttributeType(
                'TSK_FH_BACKUP_VISIBLE', BlackboardAttribute.
                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Visible")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Backup Visible ==> ")

        self.log(Level.INFO, "Get Artifacts after they were created.")
        # Get the new artifacts and attributes that were just created
        #artID_wfh = skCase.getArtifactTypeID("TSK_PREFETCH")
        #artID_cat1 = skCase.getArtifactType("TSK_FH_CATALOG_1")
        #artID_cat2 = skCase.getArtifactType("TSK_FH_CATALOG_2")
        attID_fh_pn = skCase.getAttributeType("TSK_FH_PATH")
        attID_fh_fn = skCase.getAttributeType("TSK_FH_FILE_NAME")
        attID_fh_fs = skCase.getAttributeType("TSK_FH_FILE_SIZE")
        attID_fh_usn = skCase.getAttributeType("TSK_FH_USN_JOURNAL_ENTRY")
        attID_fh_fc = skCase.getAttributeType("TSK_FH_FILE_CREATED")
        attID_fh_fm = skCase.getAttributeType("TSK_FH_FILE_MODIFIED")
        attID_fh_bq = skCase.getAttributeType("TSK_FH_BACKUP_QUEUED")
        attID_fh_bc = skCase.getAttributeType("TSK_FH_BACKUP_CREATED")
        attID_fh_bcp = skCase.getAttributeType("TSK_FH_BACKUP_CAPTURED")
        attID_fh_bu = skCase.getAttributeType("TSK_FH_BACKUP_UPDATED")
        attID_fh_bv = skCase.getAttributeType("TSK_FH_BACKUP_VISIBLE")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find the file history files from the users folders
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%edb",
                                      "%/Windows/FileHistory/%")

        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create file history directory in temp directory, if it exists then continue on processing
        Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                "File_History")
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir)
        except:
            self.log(Level.INFO,
                     "File_History Directory already exists " + Temp_Dir)

        # Write out each catalog esedb database to the temp directory
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(Temp_Dir,
                                     file.getName() + "_" + str(file.getId()))
            db_name = os.path.splitext(file.getName())[0]
            lclSQLPath = os.path.join(
                Temp_Dir, db_name + "_" + str(file.getId()) + ".db3")
            ContentUtils.writeToFile(file, File(lclDbPath))

            # Run the EXE, saving output to a sqlite database
            self.log(
                Level.INFO, "Running program on data source parm 1 ==> " +
                self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([self.path_to_exe, lclDbPath, lclSQLPath],
                         stdout=PIPE,
                         stderr=PIPE)
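            # communicate() blocks until the exporter exits, so the SQLite
            # output is complete before it is opened below.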

            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            if db_name == "Catalog1":
                artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
                artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_1")
            else:
                artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")
                artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_2")

            userpath = file.getParentPath()
            username = userpath.split('/')
            self.log(Level.INFO, "Getting Username " + username[2])

            # Open the DB using JDBC
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                     lclSQLPath)
            except SQLException as e:
                self.log(
                    Level.INFO, "Could not open database file (not SQLite) " +
                    lclSQLPath + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Query the contacts table in the database and get all columns.
            try:
                stmt = dbConn.createStatement()
                SQL_Statement = "Select ParentName 'TSK_FH_PATH', Childname 'TSK_FH_FILE_NAME', " + \
                                  "Filesize 'TSK_FH_FILE_SIZE', " + \
                                  "usn 'TSK_FH_USN_JOURNAL_ENTRY', " + \
                                  "FileCreated 'TSK_FH_FILE_CREATED', filemodified 'TSK_FH_FILE_MODIFIED', " + \
                                  "tqueued 'TSK_FH_BACKUP_QUEUED', tcreated 'TSK_FH_BACKUP_CREATED', " + \
                                  "tcaptured 'TSK_FH_BACKUP_CAPTURED', tupdated 'TSK_FH_BACKUP_UPDATED', " + \
                                  "tvisible 'TSK_FH_BACKUP_VISIBLE' from file_history"
                self.log(Level.INFO, "SQL Statement --> " + SQL_Statement)
                resultSet = stmt.executeQuery(SQL_Statement)
            except SQLException as e:
                self.log(
                    Level.INFO,
                    "Error querying database for File_History table (" +
                    e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Cycle through each row and create artifacts
            while resultSet.next():
                try:
                    #self.log(Level.INFO, "Result (" + resultSet.getString("Prefetch_File_Name") + ")")
                    FH_Path = resultSet.getString("TSK_FH_PATH")
                    FH_File_Name = resultSet.getString("TSK_FH_FILE_NAME")
                    FH_Filesize = resultSet.getString("TSK_FH_FILE_SIZE")
                    FH_Usn = resultSet.getString("TSK_FH_USN_JOURNAL_ENTRY")
                    FH_FC = resultSet.getInt("TSK_FH_FILE_CREATED")
                    FH_FM = resultSet.getInt("TSK_FH_FILE_MODIFIED")
                    FH_BQ = resultSet.getInt("TSK_FH_BACKUP_QUEUED")
                    FH_BC = resultSet.getInt("TSK_FH_BACKUP_CREATED")
                    FH_BCP = resultSet.getInt("TSK_FH_BACKUP_CAPTURED")
                    FH_BU = resultSet.getInt("TSK_FH_BACKUP_UPDATED")
                    FH_BV = resultSet.getInt("TSK_FH_BACKUP_VISIBLE")
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error getting values from contacts table (" +
                        e.getMessage() + ")")

                # Make an artifact for the appropriate File History catalog type
                art = file.newArtifact(artID_fh)

                # Add the attributes to the artifact.
                art.addAttributes(((BlackboardAttribute(attID_fh_pn, ParseFileHistoryIngestModuleFactory.moduleName, FH_Path)), \
                                  (BlackboardAttribute(attID_fh_fn, ParseFileHistoryIngestModuleFactory.moduleName, FH_File_Name)), \
                                  (BlackboardAttribute(attID_fh_fs, ParseFileHistoryIngestModuleFactory.moduleName, FH_Filesize)), \
                                  (BlackboardAttribute(attID_fh_usn, ParseFileHistoryIngestModuleFactory.moduleName, FH_Usn)), \
                                  (BlackboardAttribute(attID_fh_fc, ParseFileHistoryIngestModuleFactory.moduleName, FH_FC)), \
                                  (BlackboardAttribute(attID_fh_fm, ParseFileHistoryIngestModuleFactory.moduleName, FH_FM)), \
                                  (BlackboardAttribute(attID_fh_bq, ParseFileHistoryIngestModuleFactory.moduleName, FH_BQ)), \
                                  (BlackboardAttribute(attID_fh_bc, ParseFileHistoryIngestModuleFactory.moduleName, FH_BC)), \
                                  (BlackboardAttribute(attID_fh_bcp, ParseFileHistoryIngestModuleFactory.moduleName, FH_BCP)), \
                                  (BlackboardAttribute(attID_fh_bu, ParseFileHistoryIngestModuleFactory.moduleName, FH_BU)), \
                                  (BlackboardAttribute(attID_fh_bv, ParseFileHistoryIngestModuleFactory.moduleName, FH_BV)), \
                                  (BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), \
                                    ParseFileHistoryIngestModuleFactory.moduleName, username[2]))))

                try:
                    #index the artifact for keyword search
                    blackboard.indexArtifact(art)
                except Blackboard.BlackboardException as e:
                    self.log(Level.SEVERE,
                             "Error indexing artifact " + art.getDisplayName())

            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(ParseFileHistoryIngestModuleFactory.moduleName,
                                artID_fh_evt, None))

            # Clean up
            stmt.close()
            dbConn.close()
            #os.remove(lclDbPath)

        # Clean up the File_History directory and files
        try:
            shutil.rmtree(Temp_Dir)
        except:
            self.log(Level.INFO,
                     "removal of directory tree failed " + Temp_Dir)

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Windows File History Parser",
            " Windows File History Has Been Parsed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
Example #24
class EmuleDataSourceIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(EmuleIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    def startUp(self, context):
        self.context = context

        #self.path_to_exe = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".exe")
        #if not os.path.exists(self.path_to_exe):
        #    raise IngestModuleException("EXE was not found in module folder")
        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
# raise IngestModuleException("Oh No!")

    def process(self, dataSource, progressBar):

        fileManager = Case.getCurrentCase().getServices().getFileManager()

        skCase = Case.getCurrentCase().getSleuthkitCase()

        #Menu elements for Emule

        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_eu = skCase.addArtifactType("TSK_EMULE", "Emule User Info")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_eu = skCase.getArtifactTypeID("TSK_EMULE")

        try:
            artID_usage = skCase.addArtifactType("TSK_EMULE_USAGE",
                                                 "Emule Usage Info")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_usage = skCase.getArtifactTypeID("TSK_EMULE_USAGE")

        try:
            artID_files = skCase.addArtifactType("TSK_FILES",
                                                 "Emule Files Downloaded")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_files = skCase.getArtifactTypeID("TSK_FILES")

        try:
            artID_ed2k = skCase.addArtifactType("TSK_ED2K",
                                                "Emule Ongoing Downloads")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_ed2k = skCase.getArtifactTypeID("TSK_ED2K")

        try:
            artID_incoming_folder = skCase.addArtifactType(
                "TSK_INCOMING_FOLDER", "Incoming Folder")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_incoming_folder = skCase.getArtifactTypeID(
                "TSK_INCONMING_FOLDER")

        #Menu elements for Torrent clients

        try:
            artID_torrent_ongoing = skCase.addArtifactType(
                "TSK_TORRENT_ONGOING", "Torrent Ongoing downloads")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_torrent_ongoing = skCase.getArtifactTypeID(
                "TSK_TORRENT_ONGOING")

        try:
            artID_torrent_added = skCase.addArtifactType(
                "TSK_TORRENTS", "Torrents added")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_torrent_added = skCase.getArtifactTypeID("TSK_TORRENTS")

        #Menu Items

        try:
            attID_torrent_name = skCase.addArtifactAttributeType(
                "TSK_TORRENT_NAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Torrent Name")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Torrent Name. ==> ")

        try:
            attID_md5_hash = skCase.addArtifactAttributeType(
                "TSK_MD5_HASH",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "MD5 Hash")
        except:
            self.log(Level.INFO, "Attributes Creation Error, MD5 Hash. ==> ")

        try:
            attID_created_time = skCase.addArtifactAttributeType(
                "TSK_CREATED_TIME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Created Time")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Created time. ==> ")

        try:
            attID_emule_searches = skCase.addArtifactAttributeType(
                "TSK_EMULE_SEARCHES",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Emule Searches")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Emule Searches. ==> ")

        try:
            attID_ed2k_link = skCase.addArtifactAttributeType(
                "TSK_ED2K_LINK",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "ED2K Link")
        except:
            self.log(Level.INFO, "Attributes Creation Error, ED2K Link. ==> ")

        try:
            attID_ed2k_partfile = skCase.addArtifactAttributeType(
                "TSK_ED2K_PARTFILE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Partfile")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Partfile. ==> ")

        try:
            attID_username = skCase.addArtifactAttributeType(
                "TSK_EMULE_USERNAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Nickname")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Nickname. ==> ")

        try:
            attID_version = skCase.addArtifactAttributeType(
                "TSK_EMULE_VERSION",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Emule Version")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Emule version ")

        try:
            attID_language = skCase.addArtifactAttributeType(
                "TSK_EMULE_LANGUAGE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Emule Language")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Emule language")

        try:
            attID_incoming_dir = skCase.addArtifactAttributeType(
                "TSK_EMULE_INCOMING",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Incoming Dir")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Incoming Dir")

        try:
            attID_userhash = skCase.addArtifactAttributeType(
                "TSK_EMULE_USERHASH",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Userhash")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Userhash")

        try:
            attID_completed_files = skCase.addArtifactAttributeType(
                "TSK_EMULE_COMPLETED_FILES",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Completed Files")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Completed Files ")

        try:
            attID_downloaded_bytes = skCase.addArtifactAttributeType(
                "TSK_EMULE_DOWNLOADED_BYTES",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Downloaded Bytes")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_filename = skCase.addArtifactAttributeType(
                "TSK_EMULE_FILENAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Filename")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_filesize = skCase.addArtifactAttributeType(
                "TSK_EMULE_ED2K_FILESIZE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Filesize")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_partfile = skCase.addArtifactAttributeType(
                "TSK_EMULE_ED2K_PARTFILE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Partfile")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_request = skCase.addArtifactAttributeType(
                "TSK_EMULE_ED2K_REQUEST",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Requests")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_accepted = skCase.addArtifactAttributeType(
                "TSK_EMULE_ED2K_ACCEPTED",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Accepted Requests")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_uploaded = skCase.addArtifactAttributeType(
                "TSK_EMULE_ED2K_UPLOADED",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Uploaded")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        try:
            attID_priority = skCase.addArtifactAttributeType(
                "TSK_EMULE_ED2K_PRIORITY",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Priority")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Downloaded Bytes")

        #Emule User Info
        artID_eu = skCase.getArtifactTypeID("TSK_EMULE")
        artID_eu_evt = skCase.getArtifactType("TSK_EMULE")
        attID_fn = skCase.getAttributeType("TSK_EMULE_USERNAME")
        attID_userhash = skCase.getAttributeType("TSK_EMULE_USERHASH")
        attID_ev = skCase.getAttributeType("TSK_EMULE_VERSION")
        attID_ln = skCase.getAttributeType("TSK_EMULE_LANGUAGE")
        attID_inc = skCase.getAttributeType("TSK_EMULE_INCOMING")

        #Emule usage info
        attID_usage = skCase.getArtifactTypeID("TSK_EMULE_USAGE")
        attID_usage_evt = skCase.getArtifactType("TSK_EMULE_USAGE")
        attID_emule_searches = skCase.getAttributeType("TSK_EMULE_SEARCHES")
        attID_cf = skCase.getAttributeType("TSK_EMULE_COMPLETED_FILES")
        attID_db = skCase.getAttributeType("TSK_EMULE_DOWNLOADED_BYTES")

        #Emule File Downloads
        attID_files = skCase.getArtifactTypeID("TSK_FILES")
        attID_files_evt = skCase.getArtifactType("TSK_FILES")
        attID_filename = skCase.getAttributeType("TSK_EMULE_FILENAME")
        attID_filesize = skCase.getAttributeType("TSK_EMULE_ED2K_FILESIZE")
        attID_uploaded = skCase.getAttributeType("TSK_EMULE_ED2K_UPLOADED")
        attID_request = skCase.getAttributeType("TSK_EMULE_ED2K_REQUEST")
        attID_accepted = skCase.getAttributeType("TSK_EMULE_ED2K_ACCEPTED")
        attID_priority = skCase.getAttributeType("TSK_EMULE_ED2K_PRIORITY")
        attID_partfile = skCase.getAttributeType("TSK_EMULE_ED2K_PARTFILE")

        #Ongoing Downloads - ED2K links
        artID_ed2k = skCase.getArtifactTypeID("TSK_ED2K")
        artID_ed2k_evt = skCase.getArtifactType("TSK_ED2K")
        attID_ed2k_link = skCase.getAttributeType("TSK_ED2K_LINK")
        attID_ed2k_partfile = skCase.getAttributeType("TSK_ED2K_PARTFILE")

        #Incoming folder
        artID_incoming_folder = skCase.getArtifactTypeID("TSK_INCOMING_FOLDER")
        artID_incoming_evt = skCase.getArtifactType("TSK_INCOMING_FOLDER")
        attID_md5_hash = skCase.getAttributeType("TSK_MD5_HASH")
        attID_crtime = skCase.getAttributeType("TSK_CREATED_TIME")

        #Torrent
        artID_torrent_added = skCase.getArtifactTypeID("TSK_TORRENTS")
        artID_torrent_evt = skCase.getArtifactType("TSK_TORRENTS")

        artID_torrent_ongoing = skCase.getArtifactTypeID("TSK_TORRENT_ONGOING")
        artID_torrentOng_evt = skCase.getArtifactType("TSK_TORRENT_ONGOING")
        attID_torrent_name = skCase.getAttributeType("TSK_TORRENT_NAME")

        emuleConfigFiles = fileManager.findFiles(dataSource, "%",
                                                 "/eMule/config")
        self.log(Level.INFO, "P2P Emule Module Starting")

        fileCount = 0

        incomingDir = ''
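        # The loop below keys off the well-known eMule config files handled here:
        # preferences.ini, statistics.ini, preferences.dat, AC_SearchStrings.dat,
        # downloads.txt and known.met.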

        for file in emuleConfigFiles:

            #Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #Emule Settings
            if "preferences.ini" in file.getName():
                configFilesPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getName()))
                ContentUtils.writeToFile(file, File(configFilesPath))

                f = open(configFilesPath, 'r')
                incomingDir = ''
                nick = ''
                appVersion = ''
                lang = ''

                for line in f:
                    if "Nick=" in line and "IRC" not in line:
                        nick = line.rsplit('=', 1)[1]
                    if "AppVersion=" in line:
                        appVersion = line.rsplit('=', 1)[1]
                    if "Language=" in line:
                        lang = line.rsplit('=', 1)[1]
                        intLang = int(lang)
                        if intLang == 1034:
                            lang = "Spanish"
                        if intLang == 1033:
                            lang = "English - USA"
                        if intLang == 2057:
                            lang = "English - UK"

                        #TODO add more id to lang
                        #choices = {'a': 1, 'b': 2}
                        #result = choices.get(key, 'default')
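                        # A minimal sketch of the dict lookup suggested above, using
                        # only the LCIDs already handled here (extend as needed):
                        #   lcid_names = {1034: "Spanish", 1033: "English - USA", 2057: "English - UK"}
                        #   lang = lcid_names.get(intLang, lang)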

                    if "IncomingDir=" in line:
                        incomingDir = line.rsplit('=', 1)[1]

                art = file.newArtifact(artID_eu)
                art.addAttributes(((BlackboardAttribute(attID_fn, EmuleIngestModuleFactory.moduleName, nick)), \
                (BlackboardAttribute(attID_userhash, EmuleIngestModuleFactory.moduleName, '')), \
                (BlackboardAttribute(attID_ev, EmuleIngestModuleFactory.moduleName, appVersion)), \
                (BlackboardAttribute(attID_ln, EmuleIngestModuleFactory.moduleName, lang)), \
                (BlackboardAttribute(attID_inc, EmuleIngestModuleFactory.moduleName, incomingDir))))

                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                    artID_eu_evt, None))
                f.close()

            #Emule statistics
            if "statistics.ini" in file.getName():
                configFilesPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getName()))
                ContentUtils.writeToFile(file, File(configFilesPath))
                completedFiles = '0'
                downloadedBytes = '0'

                f2 = open(configFilesPath, 'r')

                for line in f2:
                    if "DownCompletedFiles=" in line:
                        completedFiles = line.rsplit('=', 1)[1]

                    if "TotalDownloadedBytes=" in line:
                        downloadedBytes = line.rsplit('=', 1)[1]

                art = file.newArtifact(attID_usage)

                art.addAttributes(((BlackboardAttribute(attID_cf, EmuleIngestModuleFactory.moduleName, completedFiles)), \
                (BlackboardAttribute(attID_db, EmuleIngestModuleFactory.moduleName, downloadedBytes))))

                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                    attID_usage_evt, None))

            #Emule Userhash
            if "preferences.dat" in file.getName():
                configFilesPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getName()))
                ContentUtils.writeToFile(file, File(configFilesPath))

                userHash = ''

                fobj = open(configFilesPath, "rb")

                block = (fobj.read(17))
                block = binascii.hexlify(block)
                userHash = (block[2:34])
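                # The slice assumes preferences.dat starts with a one-byte version
                # followed by the 16-byte user hash, so hex chars 2..33 cover bytes 1..16.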

                art = file.newArtifact(artID_eu)
                art.addAttribute(
                    BlackboardAttribute(attID_userhash,
                                        EmuleIngestModuleFactory.moduleName,
                                        userHash))
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                    artID_eu_evt, None))

                fobj.close()

            #Search words last used
            if "AC_SearchStrings.dat" in file.getName():
                configFilesPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getName()))
                ContentUtils.writeToFile(file, File(configFilesPath))

                f = open(configFilesPath)
                searches = ''

                for line in f:
                    searches = line.replace("\00", "")
                    searches = searches.encode('ascii', errors='ignore')
                    if len(str(searches)) > 0:
                        art = file.newArtifact(attID_usage)
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_emule_searches,
                                EmuleIngestModuleFactory.moduleName,
                                searches.strip()))
                        IngestServices.getInstance().fireModuleDataEvent(
                            ModuleDataEvent(
                                EmuleIngestModuleFactory.moduleName,
                                attID_usage_evt, None))

            #Ongoing downloads
            if "downloads.txt" in file.getName():
                configFilesPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getName()))
                ContentUtils.writeToFile(file, File(configFilesPath))

                f = open(configFilesPath, "r")

                for line in f:
                    ed2k = line.replace("\00", "")
                    if "part" in ed2k:
                        art = file.newArtifact(artID_ed2k)

                        ed2k = ed2k.split('part')
                        partfile = ed2k[0] + "part"
                        ed2kLinks = ed2k[1].strip()
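                        # Each line is assumed to pair a "<name>.part" file with its
                        # ed2k link, so splitting on "part" separates the two.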

                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ed2k_link,
                                EmuleIngestModuleFactory.moduleName,
                                ed2kLinks))
                        art.addAttribute(
                            BlackboardAttribute(
                                attID_ed2k_partfile,
                                EmuleIngestModuleFactory.moduleName, partfile))
                        IngestServices.getInstance().fireModuleDataEvent(
                            ModuleDataEvent(
                                EmuleIngestModuleFactory.moduleName,
                                artID_ed2k_evt, None))

            #Information about all files that have been downloaded
            if "known.met" in file.getName():
                configFilesPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getName()))
                ContentUtils.writeToFile(file, File(configFilesPath))

                fobj = open(configFilesPath, "rb")
                filesize = os.path.getsize(configFilesPath)

                for i in range(filesize):
                    fobj.seek(i, 0)
                    charakter = (fobj.read(4))

                    if charakter == b"\x02\x01\x00\x01":  # TAG Filename in known.met file
                        block = getblockofdata(i, fobj, filesize)
                        filename = carvefilename(block)
                        filesizeentry = carvefilesize(block)
                        totalupload = carvetotalupload(block)
                        requests = carverequests(block)
                        acceptedrequests = carveacceptedrequests(block)
                        uploadpriority = carveuploadpriority(block)
                        partfile = carvepartfile(block)

                        art = file.newArtifact(attID_files)

                        art.addAttributes(((BlackboardAttribute(attID_filename, EmuleIngestModuleFactory.moduleName, filename)), \
                        (BlackboardAttribute(attID_filesize, EmuleIngestModuleFactory.moduleName, str(filesizeentry))), \
                        (BlackboardAttribute(attID_uploaded, EmuleIngestModuleFactory.moduleName, str(totalupload))), \
                        (BlackboardAttribute(attID_request, EmuleIngestModuleFactory.moduleName, str(requests))), \
                        (BlackboardAttribute(attID_accepted, EmuleIngestModuleFactory.moduleName, str(acceptedrequests))), \
                        (BlackboardAttribute(attID_priority, EmuleIngestModuleFactory.moduleName, str(uploadpriority))), \
                        (BlackboardAttribute(attID_partfile, EmuleIngestModuleFactory.moduleName, str(partfile)))))

                        IngestServices.getInstance().fireModuleDataEvent(
                            ModuleDataEvent(
                                EmuleIngestModuleFactory.moduleName,
                                attID_files_evt, None))

        # If incoming dir is located
        if incomingDir:
            incoming = incomingDir.split(':')
            incoming = str(incoming[1]).replace("\\", "/").strip()
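            # Drop the drive letter and flip backslashes so the path matches the
            # forward-slash parent paths used by FileManager.findFiles().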
            incomingFiles = fileManager.findFiles(dataSource, "%",
                                                  str(incoming))

            for file in incomingFiles:

                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                if not ("." == file.getName()) and not (".."
                                                        == file.getName()):
                    md5 = file.getMd5Hash()
                    crtime = str(file.getCrtime())
                    crtime = time.strftime('%Y-%m-%d %H:%M:%S',
                                           time.localtime(float(crtime)))
                    if md5 is None:
                        md5 = ''
                    art = file.newArtifact(artID_incoming_folder)
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_md5_hash,
                            EmuleIngestModuleFactory.moduleName, md5))
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_crtime, EmuleIngestModuleFactory.moduleName,
                            crtime))
                    IngestServices.getInstance().fireModuleDataEvent(
                        ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                        artID_incoming_evt, None))

        #Utorrent Forensic \Roaming\uTorrent
        uTorrentForensic = fileManager.findFiles(dataSource, "%",
                                                 "/Roaming/uTorrent")

        for file in uTorrentForensic:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Files added to uTorrent, potentially downloaded
            if ".torrent" in file.getName():
                art = file.newArtifact(artID_torrent_added)
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                    artID_torrent_evt, None))

            # Current downloads
            if "resume.dat" in file.getName():
                try:
                    configFilesPath = os.path.join(
                        Case.getCurrentCase().getTempDirectory(),
                        str(file.getName()))
                    ContentUtils.writeToFile(file, File(configFilesPath))

                    f = open(configFilesPath, "rb")
                    d = decode(f.read())

                    for line in d:
                        if not (".fileguard" == line) and not ("rec" == line):
                            self.log(Level.INFO, line)
                            art = file.newArtifact(artID_torrent_ongoing)
                            art.addAttribute(
                                BlackboardAttribute(
                                    attID_torrent_name,
                                    EmuleIngestModuleFactory.moduleName,
                                    str(line)))
                    IngestServices.getInstance().fireModuleDataEvent(
                        ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                        artID_torrentOng_evt, None))
                except:
                    self.log(Level.INFO, "Error parsing resume.dat file")

        #BitTorrent Forensic \Roaming\BitTorrent
        BitTorrentForensic = fileManager.findFiles(dataSource, "%",
                                                   "/Roaming/BitTorrent")

        for file in BitTorrentForensic:
            self.log(Level.INFO, str(file.getName()))
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Files added to BitTorrent, potentially downloaded
            if ".torrent" in file.getName():
                art = file.newArtifact(artID_torrent_added)
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                    artID_torrent_evt, None))

            # Current downloads
            if "resume.dat" in file.getName():
                try:
                    configFilesPath = os.path.join(
                        Case.getCurrentCase().getTempDirectory(),
                        str(file.getName()))
                    ContentUtils.writeToFile(file, File(configFilesPath))

                    f = open(configFilesPath, "rb")
                    d = decode(f.read())

                    for line in d:
                        if not (".fileguard" == line) and not ("rec" == line):
                            self.log(Level.INFO, line)
                            art = file.newArtifact(artID_torrent_ongoing)
                            art.addAttribute(
                                BlackboardAttribute(
                                    attID_torrent_name,
                                    EmuleIngestModuleFactory.moduleName,
                                    str(line)))
                    IngestServices.getInstance().fireModuleDataEvent(
                        ModuleDataEvent(EmuleIngestModuleFactory.moduleName,
                                        artID_torrentOng_evt, None))
                except:
                    self.log(Level.INFO, "Error parsing resume.dat file")

        #Post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "P2P Forensic Finish",
            "Found files related with P2P downloads")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
 def log(self, level, msg):
     self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)
     self._logger = Logger.getLogger(self.__class__.__name__)
Example #26
 def __init__(self, settings):
     self.context = None
     self.local_settings = settings
     self._logger = Logger.getLogger(self.__class__.__name__)
     self._logger.log(Level.SEVERE, "Starting of plugin")
Example #27
 def __init__(self):
     self._logger = Logger.getLogger(self.__class__.__name__)
Example #28
class ProcessAppxregProgramsIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(
        ProcessAppxregProgramsIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)
        self._logger = Logger.getLogger(self.__class__.__name__)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting of plugin")

    def startUp(self, context):
        self.context = context
        if PlatformUtil.isWindowsOS():
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), "appxreg.exe")
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "Windows Executable was not found in module folder")
        elif PlatformUtil.getOSName() == 'Linux':
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), 'Appxreg')
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "Linux Executable was not found in module folder")
        pass

    # Where the analysis is done.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # get the current case and locate the usrclass.dat registry hives
        skCase = Case.getCurrentCase().getSleuthkitCase()
        blackboard = Case.getCurrentCase().getServices().getBlackboard()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "usrclass.dat")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create Event Log directory in temp directory, if it exists then continue on processing
        temporaryDirectory = os.path.join(
            Case.getCurrentCase().getTempDirectory(), "Appxreg_Programs")
        #self.log(Level.INFO, "create Directory " + moduleDirectory)
        try:
            os.mkdir(temporaryDirectory)
        except:
            pass
            #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

        # Write out each user's usrclass.dat file and process it.
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the file locally. Use file id as name to reduce collisions
            extractedFile = os.path.join(
                temporaryDirectory,
                str(file.getId()) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            #os.remove(extractedFile)

        for file in files:

            extractedFile = os.path.join(
                temporaryDirectory,
                str(file.getId()) + "-" + file.getName())
            dbFile = os.path.join(temporaryDirectory, "appxreg.db3")
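            # Note: the same appxreg.db3 name is reused for every hive, so each
            # run of the exporter overwrites the previous output.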

            self.log(
                Level.INFO, "Running program ==> " + self.path_to_exe + " " +
                extractedFile + " " + dbFile)
            pipe = Popen([self.path_to_exe, extractedFile, dbFile],
                         stdout=PIPE,
                         stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            artIdInsProg = skCase.getArtifactTypeID("TSK_INSTALLED_PROG")
            artIdInsProgType = skCase.getArtifactType("TSK_INSTALLED_PROG")
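            # Assumes the custom TSK_INSTALLED_PROG artifact type has already been
            # defined elsewhere; this module never calls addArtifactType for it.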

            moduleName = ProcessAppxregProgramsIngestModuleFactory.moduleName

            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % dbFile)
            except SQLException as e:
                self.log(
                    Level.INFO, "Could not open database file (not SQLite) " +
                    extractedFile + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            try:
                stmt = dbConn.createStatement()
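                # substr(...,1,11) appears to truncate a Windows FILETIME (100ns units
                # since 1601) to seconds; subtracting 11644473600 shifts it to the Unix epoch.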
                resultSet = stmt.executeQuery(
                    "select package_name, substr(install_dttm_ms_epoch,1,11) -11644473600 Installed_DTTM from installed_apps;"
                )
                self.log(Level.INFO, "query Installed_Application tables")
            except SQLException as e:
                self.log(
                    Level.INFO, "Error querying database for appx tables (" +
                    e.getMessage() + ") ")
                return IngestModule.ProcessResult.OK

            # Cycle through each row and get the installed programs and install time
            while resultSet.next():
                try:
                    artInsProg = file.newArtifact(artIdInsProg)
                    attributes = ArrayList()
                    attributes.add(
                        BlackboardAttribute(
                            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME,
                            moduleName, resultSet.getString("package_name")))
                    attributes.add(
                        BlackboardAttribute(
                            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.
                            getTypeID(), moduleName,
                            resultSet.getInt("Installed_DTTM")))

                    artInsProg.addAttributes(attributes)

                    # index the artifact for keyword search
                    try:
                        blackboard.indexArtifact(artInsProg)
                    except:
                        pass
                except SQLException as e:
                    self.log(
                        Level.INFO, "Error getting values from Appx tables (" +
                        e.getMessage() + ")")

            # Close the database statement and connection
            try:
                stmt.close()
                dbConn.close()
            except:
                pass

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Appxreg Installed Programs",
            " Appxreg Installed Programs Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
class ContactsDbIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ContactsDbIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context
        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException("Oh No!")

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find files named contacts.db, regardless of parent path
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "contacts.db")

        numFiles = len(files)
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                     str(file.getId()) + ".db")
            ContentUtils.writeToFile(file, File(lclDbPath))

            # Open the DB using JDBC
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                     lclDbPath)
            except SQLException as e:
                self.log(
                    Level.INFO, "Could not open database file (not SQLite) " +
                    file.getName() + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Query the contacts table in the database and get all columns.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("SELECT * FROM contacts")
            except SQLException as e:
                self.log(
                    Level.INFO,
                    "Error querying database for contacts table (" +
                    e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Cycle through each row and create artifacts
            while resultSet.next():
                try:
                    name = resultSet.getString("name")
                    email = resultSet.getString("email")
                    phone = resultSet.getString("phone")
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error getting values from contacts table (" +
                        e.getMessage() + ")")

                # Make an artifact on the blackboard, TSK_CONTACT and give it attributes for each of the fields
                art = file.newArtifact(
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)

                art.addAttribute(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.
                        getTypeID(), ContactsDbIngestModuleFactory.moduleName,
                        name))

                art.addAttribute(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(
                        ), ContactsDbIngestModuleFactory.moduleName, email))

                art.addAttribute(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.
                        getTypeID(), ContactsDbIngestModuleFactory.moduleName,
                        phone))

            # Fire an event to notify the UI and others that there are new artifacts
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(ContactsDbIngestModuleFactory.moduleName,
                                BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT,
                                None))

            # Clean up
            stmt.close()
            dbConn.close()
            os.remove(lclDbPath)

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "ContactsDb Analyzer",
                                              "Found %d files" % fileCount)
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
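
# Illustrative sketch only, not part of the original examples: the JDBC
# open/query/close pattern above is repeated by every module below.  A small
# helper like this (assuming the same org.sqlite.JDBC driver and java.sql
# classes these scripts already import) returns each row as a dict keyed by
# column name, e.g. queryRows(lclDbPath, "SELECT * FROM contacts").
def queryRows(dbPath, sql):
    # Load the SQLite JDBC driver and open the extracted database file
    Class.forName("org.sqlite.JDBC").newInstance()
    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % dbPath)
    try:
        stmt = dbConn.createStatement()
        resultSet = stmt.executeQuery(sql)
        meta = resultSet.getMetaData()
        columns = [meta.getColumnName(i) for i in range(1, meta.getColumnCount() + 1)]
        rows = []
        while resultSet.next():
            rows.append(dict((name, resultSet.getString(name)) for name in columns))
        stmt.close()
        return rows
    finally:
        dbConn.close()
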
Example #30
    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
        self.current_case = None
        self.PROGRAM_NAME = "Google Maps History"
        self.CAT_DESTINATION = "Destination"
Example #31
class ParseSRUDBIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ParseSRUDBIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self.List_Of_SRUDB = []

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        # Get path to EXE based on where this script is run from.
        # Assumes EXE is in same folder as script
        # Verify it is there before any ingest starts
        if PlatformUtil.isWindowsOS():
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), "export_SRUDB.exe")
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "EXE was not found in module folder")
        else:
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), "Export_SRUDB")
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "Linux Executable was not found in module folder")

        if self.local_settings.getSetting('all') == 'true':
            self.List_Of_SRUDB.append('application_resource_usage')
            self.List_Of_SRUDB.append('energy_estimation_provider')
            self.List_Of_SRUDB.append('energy_usage_data')
            self.List_Of_SRUDB.append('network_connectivity')
            self.List_Of_SRUDB.append('network_usage')
            self.List_Of_SRUDB.append('windows_push_notification')
            #self.logger.logp(Level.INFO, Parse_SRUDBWithUI.__name__, "startUp", "All Events Checked")
        else:
            #self.logger.logp(Level.INFO, Parse_SRUDBWithUI.__name__, "startUp", "No Boxes Checked")
            if self.local_settings.getSetting(
                    'application_resource_usage') == 'true':
                self.List_Of_SRUDB.append('application_resource_usage')
            if self.local_settings.getSetting(
                    'energy_estimation_provider') == 'true':
                self.List_Of_SRUDB.append('energy_estimation_provider')
            if self.local_settings.getSetting('energy_usage_data') == 'true':
                self.List_Of_SRUDB.append('energy_usage_data')
            if self.local_settings.getSetting(
                    'network_connectivity') == 'true':
                self.List_Of_SRUDB.append('network_connectivity')
            if self.local_settings.getSetting('network_usage') == 'true':
                self.List_Of_SRUDB.append('network_usage')
            if self.local_settings.getSetting(
                    'windows_push_notification') == 'true':
                self.List_Of_SRUDB.append('windows_push_notification')

        #self.logger.logp(Level.INFO, Parse_SRUDBWithUI.__name__, "startUp", str(self.List_Of_Events))
        self.log(Level.INFO, str(self.List_Of_SRUDB))

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass
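
    # Illustrative sketch only, not in the original module: the checkbox
    # branches above could be driven by a single tuple of table names,
    # assuming the same string-valued getSetting() interface used in startUp().
    def selectedTables(self):
        tables = ('application_resource_usage', 'energy_estimation_provider',
                  'energy_usage_data', 'network_connectivity',
                  'network_usage', 'windows_push_notification')
        if self.local_settings.getSetting('all') == 'true':
            return list(tables)
        return [name for name in tables
                if self.local_settings.getSetting(name) == 'true']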

    # Where the analysis is done.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Set the database to be read to the one created by the SRUDB parser program
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "SRUDB.DAT")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create the SRUDB directory in the temp directory; if it already exists, continue processing
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            temp_dir = os.path.join(Temp_Dir, "SRUDB")
            os.mkdir(temp_dir)
        except:
            self.log(Level.INFO, "SRUDB Directory already exists " + Temp_Dir)

        temp_file = ""
        # Write out each SRUDB file to the temp directory
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))
            temp_file = lclDbPath

        # Run the executable, saving output to a sqlite database

        self.log(
            Level.INFO,
            "Running program on data source parm 1 ==> " + self.path_to_exe +
            " ==> " + temp_file + "  Parm 2 ==> " + os.path.join(Temp_Dir, "SRUDB.db3"))
        subprocess.Popen(
            [self.path_to_exe, temp_file,
             os.path.join(Temp_Dir, "SRUDB.db3")]).communicate()[0]

        for file in files:
            # Open the DB using JDBC
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                     "SRUDB.db3")
            self.log(Level.INFO,
                     "Path the SRUDB database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                     lclDbPath)
            except SQLException as e:
                self.log(
                    Level.INFO, "Could not open database file (not SQLite) " +
                    file.getName() + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            #PSlist => TSK_PROG_RUN

            # For each selected SRUDB table, confirm it exists in SQLITE_MASTER and read its rows
            for SR_table_name in self.List_Of_SRUDB:
                try:
                    stmt = dbConn.createStatement()
                    resultSet = stmt.executeQuery(
                        "Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('"
                        + SR_table_name + "'); ")
                    self.log(Level.INFO, "query SQLite Master table")
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error querying database for Prefetch table (" +
                        e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and create artifacts
                while resultSet.next():
                    try:
                        self.log(
                            Level.INFO,
                            "Result (" + resultSet.getString("tbl_name") + ")")
                        table_name = resultSet.getString("tbl_name")
                        self.log(
                            Level.INFO, "Result get information from table " +
                            resultSet.getString("tbl_name") + " ")
                        SQL_String_1 = "Select * from " + table_name + ";"
                        SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                        #self.log(Level.INFO, SQL_String_1)
                        #self.log(Level.INFO, SQL_String_2)
                        artifact_name = "TSK_" + table_name.upper()
                        artifact_desc = "System Resource Usage " + table_name.upper(
                        )
                        try:
                            self.log(Level.INFO, "Begin Create New Artifacts")
                            artID_amc = skCase.addArtifactType(
                                artifact_name, artifact_desc)
                        except:
                            self.log(
                                Level.INFO,
                                "Artifacts Creation Error, some artifacts may not exist now. ==> "
                            )

                        artID_sru = skCase.getArtifactTypeID(artifact_name)
                        artID_sru_evt = skCase.getArtifactType(artifact_name)

                        Column_Names = []
                        Column_Types = []
                        resultSet2 = stmt.executeQuery(SQL_String_2)
                        while resultSet2.next():
                            Column_Names.append(
                                resultSet2.getString("name").upper())
                            Column_Types.append(
                                resultSet2.getString("type").upper())
                            #attID_ex1 = skCase.addAttrType("TSK_" + resultSet2.getString("name").upper(), resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                            if resultSet2.getString("type").upper() == "TEXT":
                                try:
                                    attID_ex1 = skCase.addArtifactAttributeType(
                                        "TSK_" +
                                        resultSet2.getString("name").upper(),
                                        BlackboardAttribute.
                                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                        STRING, resultSet2.getString("name"))
                                    #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                                except:
                                    self.log(
                                        Level.INFO,
                                        "Attributes Creation Error, " +
                                        resultSet2.getString("name") + " ==> ")
                            elif resultSet2.getString("type").upper() == "":
                                try:
                                    attID_ex1 = skCase.addArtifactAttributeType(
                                        "TSK_" +
                                        resultSet2.getString("name").upper(),
                                        BlackboardAttribute.
                                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                        STRING, resultSet2.getString("name"))
                                    #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                                except:
                                    self.log(
                                        Level.INFO,
                                        "Attributes Creation Error, " +
                                        resultSet2.getString("name") + " ==> ")
                            else:
                                try:
                                    attID_ex1 = skCase.addArtifactAttributeType(
                                        "TSK_" +
                                        resultSet2.getString("name").upper(),
                                        BlackboardAttribute.
                                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                        LONG, resultSet2.getString("name"))
                                    #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                                except:
                                    self.log(
                                        Level.INFO,
                                        "Attributes Creation Error, " +
                                        resultSet2.getString("name") + " ==> ")

                        resultSet3 = stmt.executeQuery(SQL_String_1)
                        while resultSet3.next():
                            art = file.newArtifact(artID_sru)
                            Column_Number = 1
                            for col_name in Column_Names:
                                self.log(
                                    Level.INFO,
                                    "Result get information for column " +
                                    Column_Names[Column_Number - 1] + " ")
                                self.log(
                                    Level.INFO,
                                    "Result get information for column_number "
                                    + str(Column_Number) + " ")
                                c_name = "TSK_" + col_name
                                self.log(Level.INFO,
                                         "Attribute Name is " + c_name + " ")
                                attID_ex1 = skCase.getAttributeType(c_name)
                                if Column_Types[Column_Number - 1] == "TEXT":
                                    art.addAttribute(
                                        BlackboardAttribute(
                                            attID_ex1,
                                            ParseSRUDBIngestModuleFactory.
                                            moduleName,
                                            resultSet3.getString(
                                                Column_Number)))
                                elif Column_Types[Column_Number - 1] == "":
                                    art.addAttribute(
                                        BlackboardAttribute(
                                            attID_ex1,
                                            ParseSRUDBIngestModuleFactory.
                                            moduleName,
                                            resultSet3.getString(
                                                Column_Number)))
                                else:
                                    #self.log(Level.INFO, "Value for column type ==> " + str(resultSet3.getInt(Column_Number)) + " <== ")
                                    art.addAttribute(
                                        BlackboardAttribute(
                                            attID_ex1,
                                            ParseSRUDBIngestModuleFactory.
                                            moduleName,
                                            long(
                                                resultSet3.getInt(
                                                    Column_Number))))
                                Column_Number = Column_Number + 1

                        IngestServices.getInstance().fireModuleDataEvent(
                            ModuleDataEvent(
                                ParseSRUDBIngestModuleFactory.moduleName,
                                artID_sru_evt, None))
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Error getting values from contacts table (" +
                            e.getMessage() + ")")

        # Clean up
        try:
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "removal of " + lclDbPath + " Failed")

        # Clean up the SRUDB directory and files
        for file in files:
            try:
                os.remove(os.path.join(temp_dir, file.getName()))
            except:
                self.log(
                    Level.INFO, "removal of SRUDB file failed " +
                    os.path.join(temp_dir, file.getName()))
        try:
            os.rmdir(temp_dir)
        except:
            self.log(Level.INFO,
                     "removal of SRUDB directory failed " + Temp_Dir)

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "System Resourse Usage DB",
                                              " SRUDB Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
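
# Illustrative sketch only: the TEXT / blank / other branches used above when
# creating and populating attributes map a SQLite column type onto a blackboard
# value type.  The helper assumes the same BlackboardAttribute enum these
# modules already import; TEXT and untyped columns become STRING attributes and
# everything else LONG, mirroring ParseSRUDBIngestModule.process().
def attributeValueType(sqliteType):
    if sqliteType.upper() in ("TEXT", ""):
        return BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
    return BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
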
class MacFSEventsIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(MacFSEventsIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self.database_file = ""
        self.Plugins_for_SQL = ('FolderEvent','Mount','Unmount','EndOfTransaction','LastHardLinkRemoved','HardLink', \
                                'SymbolicLink','FileEvent','PermissionChange','ExtendedAttrModified','ExtendedAttrRemoved', \
                                'DocumentRevisioning','Created','Removed','InodeMetaMod','Renamed','Modified', \
                                'Exchange','FinderInfoMod','FolderCreated')
        self.Plugin_Like_Stmt = ""

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        # Get path to the FSEvents parser executable based on the OS
        if PlatformUtil.isWindowsOS():
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                "fseparser_v2.1.exe")
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "Windows Executable was not found in module folder")
        elif PlatformUtil.getOSName() == 'Linux':
            self.path_to_exe = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), "FSEParser_V2.1")
            if not os.path.exists(self.path_to_exe):
                raise IngestModuleException(
                    "Linux Executable was not found in module folder")

        self.MacFSEvents_Executable = self.path_to_exe
        self.log(Level.INFO,
                 "MacFSEvents Executable ==> " + self.MacFSEvents_Executable)

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException(IngestModule(), "Oh No!")
        pass
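
    # Illustrative sketch only: the platform branch in startUp() could be
    # factored into a helper.  The executable names are the ones used above;
    # anything that is not Windows falls back to the non-Windows binary here.
    def locateParser(self):
        if PlatformUtil.isWindowsOS():
            name = "fseparser_v2.1.exe"
        else:
            name = "FSEParser_V2.1"
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), name)
        if not os.path.exists(path):
            raise IngestModuleException(name + " was not found in module folder")
        return path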

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        self.log(
            Level.INFO,
            "Starting to process the MacFSEvents data")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Get the temp directory and create the sub directory
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        temp_dir = os.path.join(Temp_Dir, "MacFSEvents")
        try:
            os.mkdir(temp_dir)
        except:
            self.log(Level.INFO,
                     "FSEvents Directory already exists " + temp_dir)

        # Find the fseventsd files to be processed by the FSEvents parser program
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%", ".fseventsd")
        numFiles = len(files)

        for file in files:
            #self.log(Level.INFO, "Files ==> " + file.getName())
            if (file.getName() == "..") or (file.getName()
                                            == '.') or (file.getName()
                                                        == 'fseventsd-uuid'):
                pass
                #self.log(Level.INFO, "Files ==> " + str(file))
            else:
                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                # Save the DB locally in the temp folder. use file id as name to reduce collisions
                filePath = os.path.join(temp_dir, file.getName())
                ContentUtils.writeToFile(file, File(filePath))

        self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))
        self.log(Level.INFO, "Running program ==> " + self.MacFSEvents_Executable + " -c Autopsy " + "-o " + temp_dir + \
                             " -s " + Temp_Dir + "\MacFSEvents")
        pipe = Popen([
            self.MacFSEvents_Executable, "-c", "Autopsy", "-o", temp_dir, "-s",
            temp_dir
        ],
                     stdout=PIPE,
                     stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        database_file = os.path.join(
            temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite")

        #open the database to get the SQL and artifact info out of
        try:
            head, tail = os.path.split(os.path.abspath(__file__))
            settings_db = os.path.join(head, "fsevents_sql.db3")
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn1 = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                  settings_db)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                database_file + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        try:
            stmt1 = dbConn1.createStatement()
            sql_statement1 = "select distinct artifact_name, artifact_title from extracted_content_sql;"
            #self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
            resultSet1 = stmt1.executeQuery(sql_statement1)
            while resultSet1.next():
                try:
                    self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_fse = skCase.addArtifactType(
                        resultSet1.getString("artifact_name"),
                        resultSet1.getString("artifact_title"))
                except:
                    self.log(
                        Level.INFO, "Artifacts Creation Error, " +
                        resultSet1.getString("artifact_name") +
                        " some artifacts may not exist now. ==> ")

        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                database_file + " (" + e.getMessage() + ")")
            #return IngestModule.ProcessResult.OK

        # Create the attribute type, if it exists then catch the error
        try:
            attID_fse_fn = skCase.addArtifactAttributeType(
                "TSK_FSEVENTS_FILE_NAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")

        try:
            attID_fse_msk = skCase.addArtifactAttributeType(
                "TSK_FSEVENTS_FILE_MASK",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Mask")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")

        try:
            attID_fse_src = skCase.addArtifactAttributeType(
                "TSK_FSEVENTS_SOURCE",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Source File")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")

        try:
            attID_fse_dte = skCase.addArtifactAttributeType(
                "TSK_FSEVENTS_DATES",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Date(s)")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")

        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection(
                "jdbc:sqlite:%s" % os.path.join(
                    temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite"))
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                database_file + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        #artID_fse = skCase.getArtifactTypeID("TSK_MACOS_FSEVENTS")
        #artID_fse_evt = skCase.getArtifactType("TSK_MACOS_FSEVENTS")
        artID_fse = skCase.getArtifactTypeID("TSK_MACOS_ALL_FSEVENTS")
        artID_fse_evt = skCase.getArtifactType("TSK_MACOS_ALL_FSEVENTS")
        attID_fse_fn = skCase.getAttributeType("TSK_FSEVENTS_FILE_NAME")
        attID_fse_msk = skCase.getAttributeType("TSK_FSEVENTS_FILE_MASK")
        attID_fse_src = skCase.getAttributeType("TSK_FSEVENTS_SOURCE")
        attID_fse_dte = skCase.getAttributeType("TSK_FSEVENTS_DATES")

        # Query the database
        for file in files:
            if ('slack' in file.getName()):
                pass
            elif (file.getName() == '..') or (file.getName() == '.'):
                pass
            else:
                stmt1 = dbConn1.createStatement()
                sql_statement1 = "select sql_statement, artifact_name, artifact_title from extracted_content_sql;"
                #self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
                resultSet1 = stmt1.executeQuery(sql_statement1)
                while resultSet1.next():
                    try:
                        artID_fse = skCase.getArtifactTypeID(
                            resultSet1.getString("artifact_name"))
                        artID_fse_evt = skCase.getArtifactType(
                            resultSet1.getString("artifact_name"))

                        try:
                            stmt = dbConn.createStatement()
                            sql_statement = resultSet1.getString(
                                "sql_statement"
                            ) + " and source like '%" + file.getName() + "';"
                            #self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
                            resultSet = stmt.executeQuery(sql_statement)
                            #self.log(Level.INFO, "query SQLite Master table ==> " )
                            #self.log(Level.INFO, "query " + str(resultSet))
                            # Cycle through each row and create artifact
                            while resultSet.next():
                                # Add the attributes to the artifact.
                                art = file.newArtifact(artID_fse)
                                #self.log(Level.INFO, "Result ==> " + resultSet.getString("mask") + ' <==> ' + resultSet.getString("source"))
                                art.addAttributes(((BlackboardAttribute(attID_fse_fn, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("filename"))), \
                                              (BlackboardAttribute(attID_fse_msk, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("mask"))), \
                                              (BlackboardAttribute(attID_fse_src, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("source"))), \
                                              (BlackboardAttribute(attID_fse_dte, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("OTHER_DATES")))))

                                #try:
                                # index the artifact for keyword search
                                #blackboard.indexArtifact(art)
                                #except:
                                #self.log(Level.INFO, "Error indexing artifact " + art.getDisplayName())

                        except SQLException as e:
                            self.log(
                                Level.INFO,
                                "Could not open database file (not SQLite) " +
                                database_file + " (" + e.getMessage() + ")")
                            return IngestModule.ProcessResult.OK
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Could not open database file (not SQLite) " +
                            database_file + " (" + e.getMessage() + ")")

            try:
                stmt.close()
            except:
                self.log(Level.INFO,
                         "Error closing statement for " + file.getName())

            # Fire an event to notify the UI and others that there are new artifacts
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(MacFSEventsIngestModuleFactory.moduleName,
                                artID_fse_evt, None))

        try:
            stmt.close()
            dbConn.close()
            stmt1.close()
            dbConn1.close()
            #os.remove(Temp_Dir + "Autopsy_FSEvents-EXCEPTIONS_LOG.txt")
            #os.remove(Temp_Dir + "Autopsy_FSEvents-Parsed_Records.tsv")
            #os.remove(Temp_Dir + "Autopsy_FSEvents-Parsed_Records_DB.sqlite")
            shutil.rmtree(temp_dir)
        except:
            self.log(
                Level.INFO,
                "cleanup of MacFSEvents temporary files failed " + temp_dir)

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "MacFSEvents",
            "MacFSEvents Has Been Analyzed")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
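
# Illustrative sketch only: the modules above launch their external parsers with
# Popen and read only stdout.  A small wrapper (assuming the same Popen/PIPE
# imports these scripts already use) can log stderr as well, which makes failed
# parser runs easier to diagnose, e.g.
# runParser(self.log, self.MacFSEvents_Executable, ["-c", "Autopsy", "-o", temp_dir, "-s", temp_dir]).
def runParser(logFunc, executable, args):
    # Run the tool, wait for it to finish, and log both output streams
    pipe = Popen([executable] + list(args), stdout=PIPE, stderr=PIPE)
    outText, errText = pipe.communicate()
    logFunc(Level.INFO, "Parser stdout ==> " + outText)
    if errText:
        logFunc(Level.INFO, "Parser stderr ==> " + errText)
    return outText
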
class FileMarkerIngestModule(FileIngestModule):

    _logger = Logger.getLogger(FileMarkerIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)

    def startUp(self, context):
        pass

    def process(self, file):
        
        if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or 
            (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or 
            (file.isFile() == False)):
            return IngestModule.ProcessResult.OK
    
        if (file.getName() == "$MFT" or file.getName() == "$LogFile" or file.getName() == "$UsnJrnl:$J"):
            
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), 
                  FileMarkerIngestModuleFactory.moduleName, "File System")
            art.addAttribute(att)
  
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(FileMarkerIngestModuleFactory.moduleName, 
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None));

        if (file.getNameExtension() == "evtx"):
            
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), 
                  FileMarkerIngestModuleFactory.moduleName, "Event Logs")
            art.addAttribute(att)
  
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(FileMarkerIngestModuleFactory.moduleName, 
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None));

        if (file.getName() == "pagefile.sys" or file.getName() == "hiberfil.sys" or file.getName() == "MEMORY.DMP"):
            
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), 
                  FileMarkerIngestModuleFactory.moduleName, "Memory")
            art.addAttribute(att)
  
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(FileMarkerIngestModuleFactory.moduleName, 
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None));

        if (file.getNameExtension() == "pf"):
            
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), 
                  FileMarkerIngestModuleFactory.moduleName, "Prefetch")
            art.addAttribute(att)
  
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(FileMarkerIngestModuleFactory.moduleName, 
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None));

        if (file.getName() == "SYSTEM" or file.getName() == "SECURITY" or file.getName() == "SOFTWARE" or file.getName() == "SAM" or file.getName() == "NTUSER.DAT" or file.getName() == "UsrClass.dat" or file.getName() == "RecentFileCache.bcf" or file.getName() == "Amcache.hve"):
            
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), 
                  FileMarkerIngestModuleFactory.moduleName, "Registry")
            art.addAttribute(att)
  
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(FileMarkerIngestModuleFactory.moduleName, 
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None));

        return IngestModule.ProcessResult.OK
 
    def shutDown(self):
        pass
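
# Illustrative sketch only: the repeated TSK_INTERESTING_FILE_HIT blocks in
# FileMarkerIngestModule.process() could be table driven.  The set names and
# file names below are the ones already used above, and the blackboard calls
# are the same ones the module makes.
MARKER_SETS = (
    ("File System", ("$MFT", "$LogFile", "$UsnJrnl:$J")),
    ("Memory", ("pagefile.sys", "hiberfil.sys", "MEMORY.DMP")),
    ("Registry", ("SYSTEM", "SECURITY", "SOFTWARE", "SAM", "NTUSER.DAT",
                  "UsrClass.dat", "RecentFileCache.bcf", "Amcache.hve")),
)

def markInterestingFile(file, setName, moduleName):
    # Tag the file with a TSK_INTERESTING_FILE_HIT artifact carrying the set name
    art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
    art.addAttribute(BlackboardAttribute(
        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
        moduleName, setName))
    return art
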
class ParsePrefetchDbIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(ParsePrefetchDbIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    def startUp(self, context):
        self.context = context

        # Get path to EXE based on where this script is run from.
        # Assumes EXE is in same folder as script
        # Verify it is there before any ingest starts
        self.path_to_exe = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "Prefetch_Parser_Autopsy.exe")
        if not os.path.exists(self.path_to_exe):
            raise IngestModuleException("EXE was not found in module folder")

        # Throw an IngestModule.IngestModuleException exception if there was a problem setting up
        # raise IngestModuleException("Oh No!")

    # Where the analysis is done.
    # The 'dataSource' object being passed in is of type org.sleuthkit.datamodel.Content.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/interfaceorg_1_1sleuthkit_1_1datamodel_1_1_content.html
    # 'progressBar' is of type org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress
    # See: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_data_source_ingest_module_progress.html
    def process(self, dataSource, progressBar):

        # Check to see if the artifacts exist and if not then create it, also check to see if the attributes
        # exist and if not then create them
        skCase = Case.getCurrentCase().getSleuthkitCase()
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_pf = skCase.addArtifactType("TSK_PREFETCH",
                                              "Windows Prefetch")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
            artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH")

        # Create the attribute type, if it exists then catch the error
        try:
            attID_pf_fn = skCase.addArtifactAttributeType(
                "TSK_PREFETCH_FILE_NAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Prefetch File Name")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Prefetch File Name. ==> ")

        try:
            attID_pf_an = skCase.addArtifactAttributeType(
                "TSK_PREFETCH_ACTUAL_FILE_NAME",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Actual File Name")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Actual File Name. ==> ")

        try:
            attID_nr = skCase.addArtifactAttributeType(
                "TSK_PF_RUN_COUNT",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "Program Number Runs")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, Program Number Runs. ==> ")

        try:
            attID_ex1 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_1",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 1")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 1. ==> ")

        try:
            attID_ex2 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_2",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 2")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 2. ==> ")

        try:
            attID_ex3 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_3",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 3")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 3. ==> ")

        try:
            attID_ex4 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_4",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 4")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 4 ==> ")

        try:
            attID_ex5 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_5",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 5")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 5. ==> ")

        try:
            attID_ex6 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_6",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 6")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 6. ==> ")

        try:
            attID_ex7 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_7",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 7")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 7. ==> ")

        try:
            attID_ex8 = skCase.addArtifactAttributeType(
                "TSK_PF_EXEC_DTTM_8",
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM 8")
        except:
            self.log(Level.INFO,
                     "Attributes Creation Error, PF Execution DTTM 8 ==> ")

        self.log(Level.INFO, "Get Artifacts after they were created.")
        # Get the new artifacts and attributes that were just created
        artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH")
        artID_pf_evt = skCase.getArtifactType("TSK_PREFETCH")
        attID_pf_fn = skCase.getAttributeType("TSK_PREFETCH_FILE_NAME")
        attID_pf_an = skCase.getAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME")
        attID_nr = skCase.getAttributeType("TSK_PF_RUN_COUNT")
        attID_ex1 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_1")
        attID_ex2 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_2")
        attID_ex3 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_3")
        attID_ex4 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_4")
        attID_ex5 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_5")
        attID_ex6 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_6")
        attID_ex7 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_7")
        attID_ex8 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_8")

        # Uncomment for debugging purposes, not normally needed
        # self.log(Level.INFO, "Artifact id for TSK_PREFETCH ==> " + str(artID_pf))
        # self.log(Level.INFO, "Attribute id for TSK_PREFETCH_FILE_NAME ==> " + str(attID_pf_fn))
        # self.log(Level.INFO, "Attribute id for TSK_PREFETCH_ACTUAL_FILE_NAME ==> " + str(attID_pf_an))
        # self.log(Level.INFO, "Attribute id for TSK_PF_RUN_COUNT ==> " + str(attID_nr))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_1 ==> " + str(attID_ex1))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_2 ==> " + str(attID_ex2))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_3 ==> " + str(attID_ex3))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_4 ==> " + str(attID_ex4))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_5 ==> " + str(attID_ex5))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_6 ==> " + str(attID_ex6))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_7 ==> " + str(attID_ex7))
        # self.log(Level.INFO, "Attribute id for TSK_PF_EXEC_DTTM_8 ==> " + str(attID_ex8))

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find the prefetch files and the layout.ini file from the /windows/prefetch folder
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%.pf")

        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create prefetch directory in temp directory, if it exists then continue on processing
        Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                "Prefetch_Files")
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir)
        except:
            self.log(Level.INFO,
                     "Prefetch Directory already exists " + Temp_Dir)

        # Write out each prefetch file to the temp directory
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(Temp_Dir, file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))

        # Example has only a Windows EXE, so bail if we aren't on Windows
        if not PlatformUtil.isWindowsOS():
            self.log(Level.INFO,
                     "Ignoring data source.  Not running on Windows")
            return IngestModule.ProcessResult.OK

        # Run the EXE, saving output to a sqlite database
        self.log(
            Level.INFO,
            "Running program on data source parm 1 ==> " + Temp_Dir +
            "  Parm 2 ==> " + Case.getCurrentCase().getTempDirectory())
        subprocess.Popen([
            self.path_to_exe, Temp_Dir,
            Case.getCurrentCase().getTempDirectory()
        ]).communicate()[0]

        # Set the database to be read to the one created by the prefetch parser program
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                 "Autopsy_PF_DB.db3")
        self.log(Level.INFO,
                 "Path the prefetch database file created ==> " + lclDbPath)

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the prefetch_file_info table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery(
                "Select prefetch_File_Name, actual_File_Name, Number_time_file_run, "
                + " Embeded_date_Time_Unix_1, " +
                " Embeded_date_Time_Unix_2, " + " Embeded_date_Time_Unix_3, " +
                " Embeded_date_Time_Unix_4, " + " Embeded_date_Time_Unix_5, " +
                " Embeded_date_Time_Unix_6, " + " Embeded_date_Time_Unix_7, " +
                " Embeded_date_Time_Unix_8 " + " from prefetch_file_info ")
        except SQLException as e:
            self.log(
                Level.INFO, "Error querying database for Prefetch table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                self.log(
                    Level.INFO, "Result (" +
                    resultSet.getString("Prefetch_File_Name") + ")")
                Prefetch_File_Name = resultSet.getString("Prefetch_File_Name")
                Actual_File_Name = resultSet.getString("Actual_File_Name")
                Number_Of_Runs = resultSet.getString("Number_Time_File_Run")
                Time_1 = resultSet.getString("Embeded_date_Time_Unix_1")
                Time_2 = resultSet.getString("Embeded_date_Time_Unix_2")
                Time_3 = resultSet.getString("Embeded_date_Time_Unix_3")
                Time_4 = resultSet.getString("Embeded_date_Time_Unix_4")
                Time_5 = resultSet.getString("Embeded_date_Time_Unix_5")
                Time_6 = resultSet.getString("Embeded_date_Time_Unix_6")
                Time_7 = resultSet.getString("Embeded_date_Time_Unix_7")
                Time_8 = resultSet.getString("Embeded_date_Time_Unix_8")
            except SQLException as e:
                self.log(
                    Level.INFO, "Error getting values from contacts table (" +
                    e.getMessage() + ")")

            fileManager = Case.getCurrentCase().getServices().getFileManager()
            files = fileManager.findFiles(dataSource, Prefetch_File_Name)

            for file in files:
                # Make a TSK_PREFETCH artifact for the file
                art = file.newArtifact(artID_pf)

                #self.log(Level.INFO, "Attribute Number ==>" + str(attID_pf_fn) + " " + str(attID_pf_an) )
                # Add the attributes to the artifact.
                art.addAttributes(((BlackboardAttribute(attID_pf_fn, ParsePrefetchDbIngestModuleFactory.moduleName, Prefetch_File_Name)), \
                                  (BlackboardAttribute(attID_pf_an, ParsePrefetchDbIngestModuleFactory.moduleName, Actual_File_Name)), \
                                  (BlackboardAttribute(attID_nr, ParsePrefetchDbIngestModuleFactory.moduleName, Number_Of_Runs)), \
                                  (BlackboardAttribute(attID_ex1, ParsePrefetchDbIngestModuleFactory.moduleName, Time_1)), \
                                  (BlackboardAttribute(attID_ex2, ParsePrefetchDbIngestModuleFactory.moduleName, Time_2)), \
                                  (BlackboardAttribute(attID_ex3, ParsePrefetchDbIngestModuleFactory.moduleName, Time_3)), \
                                  (BlackboardAttribute(attID_ex4, ParsePrefetchDbIngestModuleFactory.moduleName, Time_4)), \
                                  (BlackboardAttribute(attID_ex5, ParsePrefetchDbIngestModuleFactory.moduleName, Time_5)), \
                                  (BlackboardAttribute(attID_ex6, ParsePrefetchDbIngestModuleFactory.moduleName, Time_6)), \
                                  (BlackboardAttribute(attID_ex7, ParsePrefetchDbIngestModuleFactory.moduleName, Time_7)), \
                                  (BlackboardAttribute(attID_ex8, ParsePrefetchDbIngestModuleFactory.moduleName, Time_8))))

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName,
                            artID_pf_evt, None))

        # Clean up
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

        #Clean up prefetch directory and files
        for file in files:
            try:
                os.remove(Temp_Dir + "\\" + file.getName())
            except:
                self.log(
                    Level.INFO, "removal of prefetch file failed " + Temp_Dir +
                    "\\" + file.getName())
        try:
            os.rmdir(Temp_Dir)
        except:
            self.log(Level.INFO,
                     "removal of prefetch directory failed " + Temp_Dir)

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "Prefetch Analyzer",
                                              " Prefetch Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName,
                            artID_pf_evt, None))

        return IngestModule.ProcessResult.OK
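
# Illustrative sketch only: the eight try/except blocks above that create
# TSK_PF_EXEC_DTTM_1 .. TSK_PF_EXEC_DTTM_8 could be collapsed into a loop,
# assuming the same skCase handle and STRING value type used in process().
def addExecTimeAttributeTypes(skCase, logFunc):
    for n in range(1, 9):
        name = "TSK_PF_EXEC_DTTM_%d" % n
        try:
            skCase.addArtifactAttributeType(
                name,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                "PF Execution DTTM %d" % n)
        except:
            logFunc(Level.INFO, "Attribute " + name + " already exists")
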
Example #35
    def __init__(self):
        self._logger = Logger.getLogger(self.__class__.__name__)
class ProcessActivitiesCacheIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(
        ProcessActivitiesCacheIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting of plugin")
        self.stringColumns = (('TSK_ACTCACHE_ID','ActivityCache Id'), ('TSK_ACTCACHE_APP_ID', 'Activity Cache App Id'), \
                              ('TSK_ACTCACHE_PAYLOAD','Activity Cache Payload'), ('TSK_ACTCACHE_ACT_TYPE','Activity Type'), \
                              ('TSK_ACTCACHE_LOCAL_ONLY','Is Local Only'), ('TSK_ACTCACHE_ETAG','ETag'), \
                              ('TSK_ACTCACHE_PKGID_HASH','Package Id Hash'), ('TSK_ACTCACHE_PLAT_DEVID','Platform Device Id'), \
                              ('TSK_ACTCACHE_STATUS','Activity Cache Status'))

        self.dateColumns = (('TSK_ACTCACHE_ST_TIME','Start Time'), ('TSK_ACTCACHE_ENDTIME','End Time'), \
                            ('TSK_ACTCACHE_LAST_MOD','Last Modified Time'), ('TSK_ACTCACHE_EXP_TIME','Expiration Time'), \
                            ('TSK_ACTCACHE_CRT_CLOUD','Created In Cloud'), ('TSK_ACTCACHE_LAST_MOD_CLIENT','Last Modified On Client'), \
                            ('TSK_ACTCACHE_ORIG_LMOC','Original Last Modified On Client'))
        self.dateColumn = ('TSK_ACTCACHE_ST_TIME', 'TSK_ACTCACHE_ENDTIME', 'TSK_ACTCACHE_LAST_MOD', 'TSK_ACTCACHE_EXP_TIME', \
                           'TSK_ACTCACHE_CRT_CLOUD', 'TSK_ACTCACHE_LAST_MOD_CLIENT', 'TSK_ACTCACHE_ORIG_LMOC')

    def startUp(self, context):
        self.context = context
        pass

    # Where the analysis is done.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        #progressBar.switchToIndeterminate()

        # get current case and the ActivitiesCache abstract file information
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "ActivitiesCache%")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0
        moduleName = ProcessActivitiesCacheIngestModuleFactory.moduleName

        # Create the ActivitiesCache directory in the temp directory; if it already exists, continue processing
        temporaryDirectory = os.path.join(
            Case.getCurrentCase().getTempDirectory(), "ActivitiesCache")
        #self.log(Level.INFO, "create Directory " + moduleDirectory)
        try:
            os.mkdir(temporaryDirectory)
        except:
            pass
            #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

        filePathId = {}
        for file in files:
            fileName = file.getName()
            if fileName.endswith(".db"):
                filePathId[file.getParentPath()] = file.getId()
                self.log(
                    Level.INFO,
                    "file path and id ==> " + str(file.getParentPath()) +
                    " <> " + str(file.getId()) + " <> " + str(fileName))

        if numFiles > 0:
            for artifact in self.stringColumns:
                try:
                    attID = skCase.addArtifactAttributeType(
                        artifact[0], BlackboardAttribute.
                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                        artifact[1])
                    #self.log(Level.INFO, "attribute id for " + artifact[0] + " == " + str(attID))
                except:
                    self.log(
                        Level.INFO,
                        "Attributes Creation Error, " + artifact[0] + " ==> ")
            for artifact in self.dateColumns:
                try:
                    attID = skCase.addArtifactAttributeType(
                        artifact[0], BlackboardAttribute.
                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
                        artifact[1])
                    #self.log(Level.INFO, "attribute id for " + artifact[0] + " == " + str(attID))
                except:
                    self.log(
                        Level.INFO,
                        "Attributes Creation Error, " + artifact[0] + " ==> ")
            try:
                #self.log(Level.INFO, "Begin Create New Artifacts ==> TSK_ACTCACHE_DB")
                artID_art = skCase.addArtifactType(
                    "TSK_ACTCACHE_DB", "Activities Cache Timeline DB")
            except:
                self.log(
                    Level.INFO,
                    "Artifacts Creation Error, artifact TSK_ACTCACHE_DB exists. ==> "
                )

        # Write out each users store.vol file and process it.
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the file locally, prefixed with the companion .db file's id to
            # reduce collisions; fall back to this file's own id if no .db file
            # was seen in the same directory (avoids a KeyError here).
            fileId = filePathId.get(file.getParentPath(), file.getId())
            extractedFile = os.path.join(temporaryDirectory,
                                         str(fileId) + "-" + file.getName())

            ContentUtils.writeToFile(file, File(extractedFile))

            userpath = file.getParentPath()
            username = userpath.split('/')
            #self.log(Level.INFO, "Getting Username " + username[2]   )

            #        for file in files:
            fileName = file.getName()
            if fileName.endswith(".db"):
                extractedFile = os.path.join(
                    temporaryDirectory,
                    str(filePathId[file.getParentPath()]) + "-" +
                    file.getName())

                artActCacheId = skCase.getArtifactTypeID("TSK_ACTCACHE_DB")
                self.log(Level.INFO, "Artifact id ==> " + str(artActCacheId))
                artActCache = skCase.getArtifactType("TSK_ACTCACHE_DB")

                moduleName = ProcessActivitiesCacheIngestModuleFactory.moduleName

                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                         extractedFile)
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Could not open database file (not SQLite) " +
                        extractedFile + " (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                try:
                    stmt = dbConn.createStatement()
                    resultSet = stmt.executeQuery("select hex(id) TSK_ACTCACHE_ID, appId TSK_ACTCACHE_APP_ID, " + \
                                                  " cast(Payload as Text) TSK_ACTCACHE_PAYLOAD, " + \
                                                  " ActivityType TSK_ACTCACHE_ACT_TYPE, ActivityStatus TSK_ACTCACHE_STATUS, " + \
                                                  " startTime TSK_ACTCACHE_ST_TIME, EndTime TSK_ACTCACHE_ENDTIME, " + \
                                                  " LastModifiedTime TSK_ACTCACHE_LAST_MOD, ExpirationTime TSK_ACTCACHE_EXP_TIME, " + \
                                                  " createdInCloud TSK_ACTCACHE_CRT_CLOUD, " + \
                                                  " LastModifiedOnClient TSK_ACTCACHE_LAST_MOD_CLIENT, " + \
                                                  " OriginalLastModifiedOnClient TSK_ACTCACHE_ORIG_LMOC, " + \
                                                  " isLocalOnly TSK_ACTCACHE_LOCAL_ONLY, Etag TSK_ACTCACHE_ETAG, " + \
                                                  " packageIdHash TSK_ACTCACHE_PKGID_HASH, " + \
                                                  " PlatformDeviceId TSK_ACTCACHE_PLAT_DEVID from smartlookup")
                    #self.log(Level.INFO, "query smartlookup table")
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error querying database for smartlookup tables (" +
                        e.getMessage() + ") ")
                    return IngestModule.ProcessResult.OK

                meta = resultSet.getMetaData()
                columnCount = meta.getColumnCount()
                columnNames = []
                self.log(
                    Level.INFO,
                    "Number of Columns in the table ==> " + str(columnCount))
                for x in range(1, columnCount + 1):
                    #self.log(Level.INFO, "Column Count ==> " + str(x))
                    #self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                    columnNames.append(meta.getColumnLabel(x))

                # Cycle through each row and get the data
                self.log(Level.INFO, "Start PRocessing")
                while resultSet.next():
                    try:
                        artifact = file.newArtifact(artActCacheId)
                        attributes = ArrayList()
                        attributes.add(
                            BlackboardAttribute(
                                BlackboardAttribute.ATTRIBUTE_TYPE.
                                TSK_USER_NAME.getTypeID(), moduleName,
                                username[2]))
                        for x in range(0, columnCount):
                            if columnNames[x] in self.dateColumn:
                                #self.log(Level.INFO, "Date ColumnName ==> " + columnNames[x])
                                attributes.add(
                                    BlackboardAttribute(
                                        skCase.getAttributeType(
                                            columnNames[x]), moduleName,
                                        resultSet.getInt(columnNames[x])))
                            else:
                                # TSK_ACTCACHE_ID and the other string-valued
                                # columns are all added the same way.
                                attributes.add(
                                    BlackboardAttribute(
                                        skCase.getAttributeType(
                                            columnNames[x]), moduleName,
                                        resultSet.getString(
                                            columnNames[x])))

                            #self.log(Level.INFO, "Column Count ==> " + str(x))

                        artifact.addAttributes(attributes)

                        # index the artifact for keyword search
                        try:
                            blackboard = Case.getCurrentCase().getServices().getBlackboard()
                            blackboard.indexArtifact(artifact)
                        except:
                            pass
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Error getting values from smartlookup table (" +
                            e.getMessage() + ")")

            # Close the database statement
                try:
                    stmt.close()
                    dbConn.close()
                except:
                    pass

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "ActivitiesCache",
            " ActivitiesCache's Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
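The ActivitiesCache module above follows a reusable extract-then-query pattern: write the SQLite file to the case temp directory, open it through the bundled org.sqlite.JDBC driver, and walk the result set using the column labels from its metadata (the labels double as custom TSK attribute names). A condensed sketch of that loop, assuming Autopsy's Jython environment; read_rows and its parameters are placeholder names, not part of the listing:

# Condensed sketch of the JDBC-over-extracted-SQLite loop used above, assuming
# Autopsy's Jython environment (the org.sqlite.JDBC driver ships with Autopsy).
# read_rows, extractedFile and query are placeholders, not names from the listing.
from java.lang import Class
from java.sql import DriverManager

def read_rows(extractedFile, query):
    Class.forName("org.sqlite.JDBC").newInstance()
    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extractedFile)
    rows = []
    try:
        stmt = dbConn.createStatement()
        resultSet = stmt.executeQuery(query)
        meta = resultSet.getMetaData()
        labels = [meta.getColumnLabel(i) for i in range(1, meta.getColumnCount() + 1)]
        while resultSet.next():
            # Every value is read as a string here; the module above switches to
            # getInt() for the columns it registered as DATETIME attributes.
            rows.append(dict((label, resultSet.getString(label)) for label in labels))
        stmt.close()
    finally:
        dbConn.close()
    return rows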
Example #37
    def log(self, level, msg):
        if self._logger is None:
            self._logger = Logger.getLogger(self.moduleName)

        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)
class ArtifactGroup(DataSourceIngestModule):

    _logger = Logger.getLogger(ArtifactGroupFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)

    def __init__(self):
        self.context = None
        # shutDown() reports this count to the ingest inbox
        self.filesFound = 0

    # Where any setup and configuration is done
    # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext.
    # See: http://sleuthkit.org/autopsy/docs/api-docs/4.6.0/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html
    # TODO: Add any setup code that you need here.
    def startUp(self, context):
        self.context = context


    # Where the analysis is done.  Each file will be passed into here.
    # The 'file' object being passed in is of type org.sleuthkit.datamodel.AbstractFile.
    # See: http://www.sleuthkit.org/sleuthkit/docs/jni-docs/4.6.0/classorg_1_1sleuthkit_1_1datamodel_1_1_abstract_file.html
    # TODO: Add your analysis code in here.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find Delivery clues
        # get the sleuthkit database. See org.sleuthkit.datamodel.sleuthkitcase
        # http://sleuthkit.org/sleuthkit/docs/jni-docs/4.10.1/annotated.html
        skCase = Case.getCurrentCase().getSleuthkitCase()
        # get the artifact_type_id of the TSK_WEB_DOWNLOAD artifact type
        artWebDownloadId = skCase.getArtifactTypeID("TSK_WEB_DOWNLOAD")
        # print it to the log file
        self.log(
            Level.INFO,
            "Artifact type ID of TSK_WEB_DOWNLOAD:  " + str(artWebDownloadId))
        # get all artifacts that have this type ID from the database using the Sleuthkit API - not the database via sql queries
        webDownloadArtifacts = skCase.getBlackboardArtifacts(artWebDownloadId)
        # print the number of the artifacts in the log file
        self.log(
            Level.INFO, "Number of TSK_WEB_DOWNLOAD artifacts found:  " +
            str(len(webDownloadArtifacts)))
        # create new artifact type
        try:
            skCase.addArtifactType("TSK_CKC_WEB_DOWNLOAD",
                                   "CKC Delivery Web Downloads")
        except:
            # if the artifact type already exists do nothing
            self.log(Level.INFO,
                     "TSK_CKC_WEB_DOWNLOAD artifact already exists")
        # the attributes of the TSK_CKC_WEB_DOWNLOAD will be the same with those of TSK_WEB_DOWNLOAD
        # so we use them instead of creating new ones

        # first we need to get the IDs of the TSK_CKC_WEB_DOWNLOAD and of the attributes of the TSK_WEB_DOWNLOAD
        artID_CKC_WEB_DOWNLOAD = skCase.getArtifactTypeID(
            "TSK_CKC_WEB_DOWNLOAD")
        attID_TSK_PATH = skCase.getAttributeType("TSK_PATH")
        attID_TSK_URL = skCase.getAttributeType("TSK_URL")
        attID_TSK_DATETIME_ACCESSED = skCase.getAttributeType(
            "TSK_DATETIME_ACCESSED")
        attID_TSK_DOMAIN = skCase.getAttributeType("TSK_DOMAIN")
        attID_TSK_PATH_ID = skCase.getAttributeType("TSK_PATH_ID")
        attID_TSK_PROG_NAME = skCase.getAttributeType("TSK_PROG_NAME")

        # for each TSK_WEB_DOWNLOAD artifact
        for wdArt in webDownloadArtifacts:
            # get the obj_id -> this is the ID of the Source file
            sourceFileID = wdArt.getObjectID()
            # get the actual file using its obj_id
            sourceFile = skCase.getAbstractFileById(sourceFileID)
            # create a TSK_CKC_WEB_DOWNLOAD blackboard artifact based on this TSK_WEB_DOWNLOAD
            try:
                art = sourceFile.newArtifact(artID_CKC_WEB_DOWNLOAD)
                art.addAttributes((
                    (BlackboardAttribute(attID_TSK_PATH, ArtifactGroupFactory.moduleName,
                                         wdArt.getAttribute(attID_TSK_PATH).getValueString())), \
                    (BlackboardAttribute(attID_TSK_URL, ArtifactGroupFactory.moduleName,
                                         wdArt.getAttribute(attID_TSK_URL).getValueString())), \
                    (BlackboardAttribute(attID_TSK_DATETIME_ACCESSED, ArtifactGroupFactory.moduleName,
                                         wdArt.getAttribute(attID_TSK_DATETIME_ACCESSED).getValueLong())), \
                    (BlackboardAttribute(attID_TSK_DOMAIN, ArtifactGroupFactory.moduleName,
                                         wdArt.getAttribute(attID_TSK_DOMAIN).getValueString())), \
                    (BlackboardAttribute(attID_TSK_PROG_NAME, ArtifactGroupFactory.moduleName,
                                         wdArt.getAttribute(attID_TSK_PROG_NAME).getValueString())), \
                    (BlackboardAttribute(attID_TSK_PATH_ID, ArtifactGroupFactory.moduleName,
                                         wdArt.getAttribute(attID_TSK_PATH_ID).getValueLong()))
                ))
            except:
                self.log(Level.INFO,
                         "Artifact cannot be created. Moved to next.")

        return IngestModule.ProcessResult.OK

    def shutDown(self):
        # As a final part of this example, we'll send a message to the ingest inbox with the number of files found (in this thread)
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, ArtifactGroupFactory.moduleName,
            str(self.filesFound) + " files found")
        IngestServices.getInstance().postMessage(message)
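One caveat with the attribute-copying step in the example above: BlackboardArtifact.getAttribute() returns None when the source artifact lacks that attribute, so chaining .getValueString() directly can throw and the whole new artifact is then skipped by the bare except. A null-safe helper along the same lines, offered as a sketch (copy_attr and its asLong flag are illustrative names, not from the listing):

# Null-safe variant of the attribute copy used above; a sketch only. copy_attr
# and the asLong flag are illustrative names, not part of the original module.
from org.sleuthkit.datamodel import BlackboardAttribute

def copy_attr(srcArt, attrType, moduleName, asLong=False):
    src = srcArt.getAttribute(attrType)
    if src is None:
        # The source artifact does not carry this attribute; skip it.
        return None
    value = src.getValueLong() if asLong else src.getValueString()
    return BlackboardAttribute(attrType, moduleName, value)

The caller would filter out the None results before passing the remaining attributes to newArtifact(...).addAttributes(...).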
Example #39
class HashImagesIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(HashImagesIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg)
        self._logger = Logger.getLogger(self.__class__.__name__)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting of plugin")
        self.MD5HashToCheck = ""
        self.SHA1HashToCheck = ""
        self.FTKLogFile = ""

    def startUp(self, context):
        self.context = context
        self.FTKLogFile = self.local_settings.getFTKLogFile()
        self.MD5HashToCheck = self.local_settings.getMD5HashValue()
        self.SHA1HashToCheck = self.local_settings.getSHA1HashValue()
        self.log(Level.INFO, "Settings ==> " + str( self.MD5HashToCheck) + " <> " + str( self.SHA1HashToCheck))
        self.log(Level.INFO, "Settings ==> " + str(len(self.MD5HashToCheck)) + " <> " + str(len(self.SHA1HashToCheck)))
        pass
 
    def getFTKHashs(self, fileName):

        hashDict = {}
        
        with open(fileName, "r") as f:
            txtLine = f.readline()
            while txtLine:
                print (txtLine)
                if "MD5" in txtLine:
                   hashLine = txtLine.split(":")
                   if len(hashLine) > 2:
                       hashDict["Verify MD5"] = hashLine[1].strip()
                   else:
                       hashDict["Computed MD5"] = hashLine[1].strip()
                elif "SHA1" in txtLine:
                   hashLine = txtLine.split(":")
                   if len(hashLine) > 2:
                       hashDict["Verify SHA1"] = hashLine[1].strip()
                   else:
                       hashDict["Computed SHA1"] = hashLine[1].strip()
                txtLine = f.readline()
         
        self.log(Level.INFO, "Hashs found in File ==> " + str(hashDict))         
        return hashDict 

    # Where the analysis is done.
    def process(self, dataSource, progressBar):

    
        FTKHashValues = {}
        if (len(self.FTKLogFile) > 1):
            FTKHashValues = self.getFTKHashs(self.FTKLogFile)
        
        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()
        #progressBar.switchToDeterminate(numFiles)
        
        hashMd5 = hashlib.md5()
        hashSha1 = hashlib.sha1()
        
        hashImages = dataSource.getPaths()
    
        imgType = dataSource.getType()
        
        self.log(Level.INFO, "Image Type ==> " + str(imgType))

        if ((imgType == TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_RAW_SING) or \
            (imgType == TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_RAW_SPLIT) or \
            (imgType == TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_VHD_VHD) or \
            (imgType == TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_VMDK_VMDK)): 
        
            for fileName in hashImages:

                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "filename ==> " + fileName)
                with open(fileName, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hashSha1.update(chunk)
                        hashMd5.update(chunk)
        
            self.log(Level.INFO, "MD5 Hash is " + str(hashMd5.hexdigest()))
            self.log(Level.INFO, "sha1 Hash is " + str(hashSha1.hexdigest()))
            
            if len(FTKHashValues) > 0:
                if ((FTKHashValues['Computed MD5'] in str(hashMd5.hexdigest())) and \
                    (FTKHashValues['Verify MD5'] in str(hashMd5.hexdigest())) and \
                    (FTKHashValues['Computed SHA1'] in str(hashSha1.hexdigest())) and \
                    (FTKHashValues['Verify SHA1'] in str(hashSha1.hexdigest()))):
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                         "Hash Images processed", " Hash Images verified by FTK Imager Log " + self.FTKLogFile)
                    IngestServices.getInstance().postMessage(message)
                else:
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                         "Hash Images processed", " Hash Images NOT verified by FTK Imager Log" + self.FTKLogFile)
                    IngestServices.getInstance().postMessage(message)
                return IngestModule.ProcessResult.OK
            elif len(self.MD5HashToCheck) > 0:
                self.log(Level.INFO, "MD5 Hash Provided ")
                if (self.MD5HashToCheck in hashMd5.hexdigest()):
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                         "Hash Images processed", " Hash Images - verified by supplied Value MD5 " + str(hashMd5.hexdigest()))
                    IngestServices.getInstance().postMessage(message)
                else:
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                         "Hash Images processed", " Hash Images - NOT verified by supplied Value MD5 " + str(hashMd5.hexdigest()))
                    IngestServices.getInstance().postMessage(message)
                return IngestModule.ProcessResult.OK
            elif len(self.SHA1HashToCheck) > 0:
                self.log(Level.INFO, "SHA1 Provided ")
                if (self.SHA1HashToCheck in hashSha1.hexdigest()):
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                         "Hash Images processed", " Hash Images - verified by supplied Value SHA1 " + str(hashSha1.hexdigest()))
                    IngestServices.getInstance().postMessage(message)
                else:
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                         "Hash Images processed", " Hash Images - NOT verified by supplied Value SHA1 " + str(hashSha1.hexdigest()))
                    IngestServices.getInstance().postMessage(message)
                return IngestModule.ProcessResult.OK
            else:
                self.log(Level.INFO, "no hashes provided ")
                message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, \
                "Hash Images processed", " Hash Images - NO valid MD5/SHA1 provided to compare " + str(hashMd5.hexdigest()))
                IngestServices.getInstance().postMessage(message)
                return IngestModule.ProcessResult.OK
                
        else:
            message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                "Hash_Images", "Skipping Non RAW, VHD, VMDK image " + str(hashImages[0]) )
            IngestServices.getInstance().postMessage(message)

            return IngestModule.ProcessResult.OK                
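The hashing loop above is plain hashlib usage and can be exercised outside Autopsy. A standalone sketch of the same chunked-read approach, with exact, case-insensitive comparison instead of the substring test the module uses; the function names are illustrative:

# Standalone sketch of the chunked image-hashing step above, standard library
# only. hash_image_segments and matches are illustrative names; comparison is
# done with == rather than the substring ("in") test used by the module.
import hashlib

def hash_image_segments(paths, chunk_size=4096):
    md5, sha1 = hashlib.md5(), hashlib.sha1()
    for path in paths:
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                md5.update(chunk)
                sha1.update(chunk)
    return md5.hexdigest(), sha1.hexdigest()

def matches(expected, actual):
    return expected.strip().lower() == actual.lower()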
class ProcessAppxProgramsIngestModule(DataSourceIngestModule):

    _logger = Logger.getLogger(
        ProcessAppxProgramsIngestModuleFactory.moduleName)

    def log(self, level, msg):
        self._logger.logp(level, self.__class__.__name__,
                          inspect.stack()[1][3], msg)
        self._logger = Logger.getLogger(self.__class__.__name__)

    def __init__(self, settings):
        self.context = None
        self.local_settings = settings
        self._logger = Logger.getLogger(self.__class__.__name__)
        self._logger.log(Level.SEVERE, "Starting of plugin")

    def startUp(self, context):
        self.context = context
        pass

    # Where the analysis is done.
    def process(self, dataSource, progressBar):

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # get current case and the store.vol abstract file information
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "staterepository-machine%")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create Event Log directory in temp directory, if it exists then continue on processing
        temporaryDirectory = os.path.join(
            Case.getCurrentCase().getTempDirectory(), "Appx_Programs")
        #self.log(Level.INFO, "create Directory " + moduleDirectory)
        try:
            os.mkdir(temporaryDirectory)
        except:
            pass
            #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

        # Write out each users store.vol file and process it.
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the file locally. Use file id as name to reduce collisions
            extractedFile = os.path.join(temporaryDirectory, file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            #os.remove(extractedFile)

        for file in files:
            #os.remove(extractedFile)
            if file.getName().lower() == "staterepository-machine.srd":
                extractedFile = os.path.join(temporaryDirectory,
                                             file.getName())

                artIdInsProg = skCase.getArtifactTypeID("TSK_INSTALLED_PROG")
                artIdInsProgType = skCase.getArtifactType("TSK_INSTALLED_PROG")

                moduleName = ProcessAppxProgramsIngestModuleFactory.moduleName

                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                         extractedFile)
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Could not open database file (not SQLite) " +
                        extractedFile + " (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                try:
                    stmt = dbConn.createStatement()
                    resultSet = stmt.executeQuery("select distinct * from (Select packfam.name, packfam.publisher, packfam.publisherid, packuser.user, " + \
                                                  " case Architecture when 0 then 'X64' when 9 then 'x86' when 11 then 'Neutral' else Architecture end Architecture, " + \
                                                  " pack.ResourceId, " + \
                                                  " substr(pack.packageFullName, instr(pack.packageFullName, '_') + 1, instr(substr(pack.packageFullName, instr(pack.packageFullName, '_') + 1), '_') - 1) version, " + \
                                                  " packfam.packageFamilyname,  pack.packageFullName, '??' isFramework, '??' PackageUserInformaton, " + \
                                                  " '??' isResourcePakage, '??' IsBundle, '??' IsDevelopment, '??' Dependicies, '??' IsPartiallyStaged, " + \
                                                  " case SignatureOrigin when 3 then 'System' when 2 then 'Store' else 'Unknown' end SignatureKind, packuser.PackageStatus Status, " + \
                                                  " (substr(packuser.installTime,1,11) -11644473600) InstallTime, packloc.installedLocation " + \
                                                  " from PackageUser packuser, package pack, packageFamily packfam, packageLocation packloc " + \
                                                  " where packuser.package = pack._PackageId and pack.packageFamily = packfam._PackagefamilyId " + \
                                                  " and packloc.package = pack._packageId and (pack.resourceId is null or pack.resourceId = 'neutral')); ")
                    self.log(Level.INFO, "query Appx tables")
                except SQLException as e:
                    self.log(
                        Level.INFO,
                        "Error querying database for appx tables (" +
                        e.getMessage() + ") ")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and get the installed programs and install time
                while resultSet.next():
                    try:
                        artInsProg = file.newArtifact(artIdInsProg)
                        attributes = ArrayList()
                        attributes.add(
                            BlackboardAttribute(
                                BlackboardAttribute.ATTRIBUTE_TYPE.
                                TSK_PROG_NAME, moduleName,
                                resultSet.getString("name")))
                        attributes.add(
                            BlackboardAttribute(
                                BlackboardAttribute.ATTRIBUTE_TYPE.
                                TSK_DATETIME.getTypeID(), moduleName,
                                resultSet.getInt("InstallTime")))

                        artInsProg.addAttributes(attributes)

                        # index the artifact for keyword search
                        try:
                            blackboard = Case.getCurrentCase().getServices().getBlackboard()
                            blackboard.indexArtifact(artInsProg)
                        except:
                            pass
                    except SQLException as e:
                        self.log(
                            Level.INFO,
                            "Error getting values from Appx tables (" +
                            e.getMessage() + ")")

            # Close the database statement
                try:
                    stmt.close()
                    dbConn.close()
                except:
                    pass

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Appx Installed Programs",
            " Appx Installed Programs Has Been Analyzed ")
        IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
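The SQL in the Appx example converts the stored install time with substr(packuser.installTime, 1, 11) - 11644473600: it keeps the first eleven digits of a Windows FILETIME (turning 100-nanosecond ticks since 1601-01-01 into whole seconds) and then subtracts the 1601-to-1970 offset to get a Unix timestamp. The same conversion in plain Python, as an illustrative sketch:

# Pure-Python equivalent of the FILETIME conversion done in the SQL above;
# filetime_to_unix is an illustrative name. For install times after 1970 the
# FILETIME value has 18 digits, so keeping its first 11 digits is the same as
# integer-dividing by 10**7 (100 ns ticks -> seconds).
SECONDS_BETWEEN_1601_AND_1970 = 11644473600

def filetime_to_unix(filetime_ticks):
    # FILETIME counts 100-nanosecond intervals since 1601-01-01 (UTC).
    return filetime_ticks // 10**7 - SECONDS_BETWEEN_1601_AND_1970

# Example: filetime_to_unix(132223104000000000) == 1577836800  (2020-01-01 UTC)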