def process(self, dataSource, progressBar):
    """Export every non-empty MD5 hash in the case to a per-data-source
    hashset text file (md5<TAB>name per line), then post an ingest message.

    Interface is the standard Autopsy data-source ingest callback.
    """
    # Total amount of work is unknown up front.
    progressBar.switchToIndeterminate()

    sourceName = os.path.basename(dataSource.getPaths()[0])
    currentCase = Case.getCurrentCase()
    outPath = os.path.join(currentCase.getExportDirectory(), str(sourceName) + "_hashset.txt")
    #self.log(Level.INFO, "create Directory " + moduleDirectory)

    # Pull name+md5 for every file that actually has a hash.
    dbQuery = currentCase.getSleuthkitCase().executeQuery('select name, md5 from tsk_files where md5 <> "";')
    rows = dbQuery.getResultSet()
    with open(outPath, 'w') as out:
        while rows.next():
            out.write(rows.getString("md5") + "\t" + rows.getString("name") + "\n")
    dbQuery.close()

    # After all databases, post a message to the ingest messages in box.
    IngestServices.getInstance().postMessage(
        IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                    "Create_DS_Hashset",
                                    " Hashset Create For Datasource " + sourceName))
    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Write a CSV report (uniquePath,md5) covering every non-directory file
    in the case, then register it in the case tree."""
    reportPath = os.path.join(baseReportDir, self.getRelativeFilePath())

    # Query the database for the files (directories carry no hash).
    skCase = Case.getCurrentCase().getSleuthkitCase()
    dirMetaType = TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()
    matchedFiles = skCase.findAllFilesWhere("NOT meta_type = " + str(dirMetaType))

    # One progress tick per file.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(len(matchedFiles))

    with open(reportPath, 'w') as out:
        for matched in matchedFiles:
            hashValue = matched.getMd5Hash()
            # md5 is None when the Hash Lookup module was not run.
            if hashValue is None:
                hashValue = ""
            out.write(matched.getUniquePath() + "," + hashValue + "\n")
            progressBar.increment()

    # Add the report to the Case, so it is shown in the tree.
    Case.getCurrentCase().addReport(reportPath, self.moduleName, "Hashes CSV")
    progressBar.complete(ReportStatus.COMPLETE)
def process(self, dataSource, progressBar):
    """Sample data-source ingest: find files matching *test*, flag each with a
    TSK_INTERESTING_FILE_HIT artifact, index it for keyword search, and read
    the file contents to demonstrate the content-stream API.

    Returns IngestModule.ProcessResult.OK (also on user cancellation).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For our example, we will use FileManager to get all
    # files with the word "test"
    # in the name and then count and read them
    # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/4.6.0/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%test%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard.  TSK_INTERESTING_FILE_HIT is a generic type of
        # artfiact.  Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonDataSourceIngestModuleFactory.moduleName, "Test file")
        art.addAttribute(att)

        try:
            # index the artifact for keyword search
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            # Indexing failure is logged but does not abort the ingest.
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # To further the example, this code will read the contents of the file and count the number of bytes
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        readLen = inputStream.read(buffer)
        while (readLen != -1):
            totLen = totLen + readLen
            readLen = inputStream.read(buffer)

        # Update the progress bar
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Count files created in the past two weeks and write the count to the
    module's report file, registered under the current case.

    :param baseReportDir: directory the report file is created in
    :param progressBar: report progress panel (two units of work)
    """
    # Configure progress bar for 2 tasks.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(2)

    # Epoch cutoff: two weeks before now (Java millis -> seconds).
    sleuthkitCase = Case.getCurrentCase().getSleuthkitCase()
    currentTime = System.currentTimeMillis() / 1000
    minTime = currentTime - (14 * 24 * 60 * 60)

    # Get files created in the last two weeks.
    fileCount = 0
    for otherFile in sleuthkitCase.findFilesWhere("crtime > %d" % minTime):
        fileCount += 1
    progressBar.increment()

    # BUG FIX: the original built the path with a literal '\\', which only
    # works on Windows; os.path.join is portable and matches the sibling
    # report modules in this file. Also use `with` so the handle is closed
    # even if the write raises.
    fileName = os.path.join(baseReportDir, self.getRelativeFilePath())
    with open(fileName, 'w') as report:
        report.write("file count = %d" % fileCount)

    Case.getCurrentCase().addReport(fileName, "SampleGeneralReportModule", "Sample Python Report")
    progressBar.increment()
    progressBar.complete()
def shutDown(self):
    """At ingest shutdown, write out the MD5 hashes collected during ingest
    that are NOT already present in a reference SQLite database; if anything
    goes wrong, fall back to dumping all collected hashes.

    NOTE(review): `md5` and `filename` are module/global state populated
    elsewhere (not visible in this chunk) — confirm both are set before
    shutdown runs.
    """
    # Deduplicate the collected hashes.
    noDupes = list(set(md5))
    try:
        if(filename):
            uniquePath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "NewLowHangingFruit.txt")
            uniqueFile = open(uniquePath,'w')
            # Reference DB is opened over JDBC (Jython environment).
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % filename)
            stmt = dbConn.createStatement()
            for line in noDupes:
                # Keep only hashes absent from the reference MD5 table.
                resultSet = stmt.executeQuery("SELECT * FROM MD5 where md5 == '%s'" % line)
                if(resultSet.next()):
                    # Hash already known; placeholder branch kept intentionally.
                    temp = "Future Improvement"
                else:
                    uniqueFile.write(line+'\n')
            stmt.close()
            dbConn.close()
            uniqueFile.close()
    except:
        # Best-effort fallback: without a usable reference DB, dump every
        # collected hash instead of only the unknown ones.
        allPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "AllLowHangingFruit.txt")
        allFile = open(allPath,'w')
        for line in noDupes:
            allFile.write(line+'\n')
        allFile.close()
def process(self, dataSource, progressBar):
    """Run the bundled Windows EXE against the first disk-image path and file
    its stdout as a case report."""
    # Amount of work is unknown up front.
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Logical file sets have no image paths to hand to the tool.
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    imagePaths = dataSource.getPaths()

    # Output goes to the case Reports folder, named by data source ID.
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "Reports",
                              "img_stat-" + str(dataSource.getId()) + ".txt")

    # NOTE: we should really be checking for if the module has been
    # cancelled and then killing the process.
    self.log(Level.INFO, "Running program on data source")
    with open(reportPath, 'w') as reportHandle:
        subprocess.Popen([self.path_to_exe, imagePaths[0]], stdout=reportHandle).communicate()[0]

    # Add the report to the case, so it shows up in the tree.
    Case.getCurrentCase().addReport(reportPath, "Run EXE", "img_stat output")
    return IngestModule.ProcessResult.OK
def customizeComponents(self):
    """Populate the settings-panel widgets from saved module settings and seed
    the sketch name/description from the current case metadata."""
    #self.Exclude_File_Sources_CB.setSelected(self.local_settings.getExclude_File_Sources())
    #self.Run_Timesketch_CB.setSelected(self.local_settings.getRun_Timesketch())
    #self.Import_Timesketch_CB.setSelected(self.local_settings.getImport_Timesketch())
    #self.check_Database_entries()
    # Restore previously-saved connection settings into the text fields.
    self.IP_Address_TF.setText(self.local_settings.getSetting('ipAddress'))
    self.Port_Number_TF.setText(self.local_settings.getSetting('portNumber'))
    self.userName_TF.setText(self.local_settings.getSetting('userName'))
    self.password_TF.setText(self.local_settings.getSetting('password'))
    # Sketch name comes from the case number, description from the case name.
    self.sketchName_TF.setText(Case.getCurrentCase().getNumber())
    self.sketchDescription_TF.setText(Case.getCurrentCase().getName())
    # Persist the derived values immediately so later reads see them.
    self.local_settings.setSetting('sketchName', self.sketchName_TF.getText())
    self.local_settings.setSetting('sketchDescription', self.sketchDescription_TF.getText())
def process(self, dataSource, progressBar):
    """Run YARA over every non-KNOWN file matching '%.doc', appending each
    scan's output plus the file's path to a single YARA.txt report.

    :param dataSource: data source being ingested
    :param progressBar: ingest progress bar
    :returns: IngestModule.ProcessResult.OK (also on cancellation)
    """
    progressBar.switchToIndeterminate()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    ###---EDIT HERE---###
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    ###---EDIT HERE---###
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    ###---EDIT HERE---###
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "Reports", "YARA.txt")
    ###---EDIT HERE---###
    reportHandle = open(reportPath, 'w')
    # BUG FIX: the original returned on cancellation without closing
    # reportHandle, leaking the file handle; try/finally guarantees the close.
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Skip hash-known (NSRL) files.
            if (str(file.getKnown()) != "KNOWN"):
                exportPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                          str(file.getId())+"."+file.getNameExtension())
                ###---EDIT HERE---###
                # Extract the file to disk so the external YARA binary can read it.
                ContentUtils.writeToFile(file, File(exportPath))
                subprocess.Popen([self.path_to_exe, self.path_to_rules, exportPath],
                                 stdout=reportHandle).communicate()[0]
                ###---EDIT HERE---###
                reportHandle.write(file.getParentPath()+file.getName()+'\n\n')
            self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "YARA Scan", "Scanned %d Files" % numFiles)
        IngestServices.getInstance().postMessage(message)
    finally:
        reportHandle.close()
    Case.getCurrentCase().addReport(reportPath, "YARA Scan", "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def createAttribute(self, attributeName, attributeType, attributeDescription):
    """Create a custom blackboard attribute type (or fetch it if creation
    fails because it already exists) and return the attribute type object.

    attributeType is one of "string", "datetime", "integer", "long",
    "double", "byte"; anything else falls back to STRING.
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    valueTypes = {
        "string":   BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
        "datetime": BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
        "integer":  BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER,
        "long":     BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG,
        "double":   BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE,
        "byte":     BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE,
    }
    # Unknown names fall back to STRING, matching the original else branch.
    valueType = valueTypes.get(attributeType, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING)
    try:
        attributeId = skCase.addArtifactAttributeType(attributeName, valueType, attributeDescription)
        return skCase.getAttributeType(attributeName)
    except:
        # Creation failed (typically: type already exists); log and return it.
        self.log(Level.INFO, "Attributes Creation Error ==> " + str(attributeName) + " <<>> " + str(attributeType) + " <<>> " + str(attributeDescription))
        return skCase.getAttributeType(attributeName)
def add_Volatility_Dump_file(self, dataSource, dir_abstract_file_info, dump_dir, local_dir, pid_name):
    """Register every file found directly in dump_dir as a derived file under
    dir_abstract_file_info, skipping names that already exist there.

    NOTE(review): pid_name is never used in this body — confirm whether it was
    meant to filter the dump files.
    """
    self.log(Level.INFO, "Adding Files from Dump Directory")
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)
    self.log(Level.INFO, "Parent Path is ==> " + str(dir_abstract_file_info))
    #skCase = Case.getCurrentCase().getSleuthkitCase()
    # Despite the name, this is the FileManager service (addDerivedFile is
    # called through it below).
    skCase = Case.getCurrentCase().getServices().getFileManager()
    # Non-recursive listing: only regular files directly inside dump_dir.
    files = next(os.walk(dump_dir))[2]
    for file in files:
        self.log(Level.INFO, " File Name is ==> " + file)
        dev_file = os.path.join(dump_dir, file)
        local_file = os.path.join(local_dir, file)
        self.log(Level.INFO, " Dev File Name is ==> " + dev_file)
        self.log(Level.INFO, " Local File Name is ==> " + local_file)
        if not(self.check_derived_existance(dataSource, file, dir_abstract_file_info.parentPath)):
            # Add derived file
            # Parameters Are:
            # File Name, Local Path, size, ctime, crtime, atime, mtime, isFile, Parent File, rederive Details, Tool Name,
            # Tool Version, Other Details, Encoding Type
            # NOTE(review): the stray `+ \` makes the ctime argument `+0`
            # (unary plus) — harmless, but it reads like a leftover typo.
            derived_file = skCase.addDerivedFile(file, local_file, os.path.getsize(dev_file), + \
                                0, 0, 0, 0, True, dir_abstract_file_info, "", "Volatility", self.Volatility_Version, "", TskData.EncodingType.NONE)
            # Tell Autopsy new content was added so the UI refreshes.
            IngestServices.getInstance().fireModuleContentEvent(ModuleContentEvent(derived_file))
            #self.log(Level.INFO, "Derived File ==> " + str(derived_file))
        else:
            pass
def indexArtifact(self, artifact):
    """Index *artifact* for keyword search on the case blackboard.

    Indexing is best-effort: any failure is deliberately ignored so that a
    search-index problem never aborts ingest.
    """
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    try:
        # BUG FIX: the original indexed the undefined name `artChat`, which
        # always raised NameError and was silently swallowed by the bare
        # except — so no artifact was ever indexed. Index the artifact that
        # was actually passed in.
        blackboard.indexArtifact(artifact)
    except:
        pass
def shutDown(self):
    """Dump the unique MD5 hashes gathered during ingest to GoldBuild.txt in
    the case directory.

    NOTE(review): `md5` is module-level state populated elsewhere — confirm it
    is filled before shutdown runs.
    """
    uniqueHashes = list(set(md5))
    targetPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "GoldBuild.txt")
    with open(targetPath, 'w') as target:
        for hashLine in uniqueHashes:
            target.write(hashLine + '\n')
def process(self, dataSource, progressBar):
    """Sample data-source ingest (java.util.logging variant): find files named
    *test*, flag each with a TSK_INTERESTING_FILE_HIT artifact, read its
    contents, then post a summary ingest message.

    Returns IngestModule.ProcessResult.OK (also on user cancellation).
    """
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    logger = Logger.getLogger(SampleJythonDataSourceIngestModuleFactory.moduleName)

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    services = Services(sleuthkitCase)
    fileManager = services.getFileManager()

    # For our example, we will use FileManager to get all
    # files with the word "test"
    # in the name and then count and read them
    files = fileManager.findFiles(dataSource, "%test%")

    numFiles = len(files)
    logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process", "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process", "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard.  TSK_INTERESTING_FILE_HIT is a generic type of
        # artfiact.  Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                  SampleJythonDataSourceIngestModuleFactory.moduleName, "Test file")
        art.addAttribute(att)

        # To further the example, this code will read the contents of the file and count the number of bytes
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        readLen = inputStream.read(buffer)
        while (readLen != -1):
            totLen = totLen + readLen
            readLen = inputStream.read(buffer)

        # Update the progress bar
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK;
def __findGeoLocationsInDB(self, databasePath, abstractFile):
    """Parse CachedPosition rows out of the SQLite database at databasePath
    (via the Java JDBC driver) and post one TSK_GPS_TRACKPOINT artifact per
    row, attached to abstractFile.

    Connection/statement/result-set cleanup is handled in the finally block;
    each error class is handled at the narrowest useful scope.
    """
    if not databasePath:
        return

    try:
        Class.forName("org.sqlite.JDBC") #load JDBC driver
        connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
        statement = connection.createStatement()
    except (ClassNotFoundException) as ex:
        self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
        return
    except (SQLException) as ex:
        # Error connecting to SQL databse.
        return

    resultSet = None
    try:
        resultSet = statement.executeQuery("SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;")
        while resultSet.next():
            # Timestamps are stored in milliseconds; Autopsy wants seconds.
            timestamp = Long.valueOf(resultSet.getString("timestamp")) / 1000
            latitude = Double.valueOf(resultSet.getString("latitude"))
            longitude = Double.valueOf(resultSet.getString("longitude"))

            attributes = ArrayList()
            artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME, "Browser Location History"))
            # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(),moduleName, accuracy))  # NOTE: originally commented out

            artifact.addAttributes(attributes);
            try:
                # index the artifact for keyword search
                blackboard = Case.getCurrentCase().getServices().getBlackboard()
                blackboard.indexArtifact(artifact)
            except Blackboard.BlackboardException as ex:
                # Indexing failure is reported but does not stop row processing.
                self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
                MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())
    except SQLException as ex:
        # Unable to execute browser location SQL query against database.
        pass
    except Exception as ex:
        self._logger.log(Level.SEVERE, "Error putting artifacts to blackboard", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    finally:
        try:
            if resultSet is not None:
                resultSet.close()
            statement.close()
            connection.close()
        except Exception as ex:
            # Error closing database.
            pass
def analyze(self, dataSource, fileManager, context):
    """Extract each 'WordsFramework' database to the case temp directory and
    parse Words With Friends messages out of it."""
    try:
        # Register (or look up) the custom WWF account type used by the parser.
        global wwfAccountType
        wwfAccountType = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().addAccountType("WWF", "Words with Friends")

        for abstractFile in fileManager.findFiles(dataSource, "WordsFramework"):
            try:
                tempCopy = File(Case.getCurrentCase().getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, tempCopy, context.dataSourceIngestIsCancelled)
                self.__findWWFMessagesInDB(tempCopy.toString(), abstractFile, dataSource)
            except Exception as ex:
                # One bad database should not stop the others from parsing.
                self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding WWF messages.
        pass
def add_Volatility_Dump_dir(self, dataSource, dir_abstract_file_info, dump_dir, dir_name, local_dir):
    """Register dump_dir/dir_name as a derived file under
    dir_abstract_file_info (if not already present), then look the entry back
    up via FileManager and return the matching AbstractFile.

    Returns the derived file whose localPath matches local_dir/dir_name, or
    the first match when none has that localPath.
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    self.log(Level.INFO, " dir Name is ==> " + dir_name)
    self.log(Level.INFO, " abstract parentPath is ==> " + str(dir_abstract_file_info.parentPath))
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)
    dev_file = os.path.join(dump_dir, dir_name)
    local_file = os.path.join(local_dir, dir_name)
    if not(self.check_derived_existance(dataSource, dir_name, dir_abstract_file_info.parentPath)):
        # Add derived file
        # Parameters Are:
        # File Name, Local Path, size, ctime, crtime, atime, mtime, isFile, Parent File, rederive Details, Tool Name,
        # Tool Version, Other Details, Encoding Type
        # NOTE(review): the stray `+ \` makes the ctime argument `+0` (unary
        # plus) — harmless, but it reads like a leftover typo.
        derived_file = skCase.addDerivedFile(dir_name, local_file, os.path.getsize(dev_file), + \
                            0, 0, 0, 0, True, dir_abstract_file_info, "", "Volatility", self.Volatility_Version, "", TskData.EncodingType.NONE)
        # Tell Autopsy new content was added so the UI refreshes.
        IngestServices.getInstance().fireModuleContentEvent(ModuleContentEvent(derived_file))
        # self.context.addFilesToJob(df_list)
        #self.log(Level.INFO, "Derived File ==> " + str(derived_file))
    else:
        pass
        #self.log(Level.INFO, " derived File Is ==> " + str(derived_file))
    # Look the derived file back up so the caller gets an AbstractFile.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    new_derived_file = fileManager.findFiles(dataSource, dir_name, dir_abstract_file_info.parentPath)
    numFiles = len(new_derived_file)
    self.log(Level.INFO, " print number of files is " + str(numFiles))
    for file in new_derived_file:
        self.log(Level.INFO, "File Exists ==> " + str(file))
        self.log(Level.INFO, "Local Directory ==> " + str(file.localPath))
        self.log(Level.INFO, "Local Directory ==> " + local_file)
        # Prefer the match whose local path is exactly the one we just added.
        if local_file == file.localPath:
            self.log(Level.INFO, "File Exists ==> " + str(file))
            return file
    self.log(Level.INFO, "File Exists2 ==> " + str(new_derived_file[0]))
    return new_derived_file[0]
def createArtifact(self, artifactName, artifactDescription):
    """Create a custom artifact type if it does not already exist, then return
    its type id. Creation failure (typically 'already exists') simply falls
    through to the lookup."""
    caseDb = Case.getCurrentCase().getSleuthkitCase()
    try:
        newType = caseDb.addArtifactType(artifactName, artifactDescription)
        return caseDb.getArtifactTypeID(artifactName)
    except:
        #self.log(Level.INFO, "Artifacts Creation Error for artifact ==> " + str(artifactName) + " <<>> " + artifactDescription)
        return caseDb.getArtifactTypeID(artifactName)
def get_artifacts(self):
    """Append every distinct artifact type name present in the case database
    to self.artifact_list."""
    query = ("select distinct(type_name) 'type_name' from blackboard_artifacts a, blackboard_artifact_types b "
             " where a.artifact_type_id = b.artifact_type_id;")
    caseDb = Case.getCurrentCase().getSleuthkitCase()
    dbResult = caseDb.executeQuery(query)
    rows = dbResult.getResultSet()
    while rows.next():
        self.artifact_list.append(rows.getString("type_name"))
    dbResult.close()
def find_tags(self):
    """Append every distinct content-tag display name in the case database to
    self.tag_list."""
    query = ("SELECT distinct(display_name) u_tag_name FROM content_tags INNER JOIN tag_names ON "
             " content_tags.tag_name_id = tag_names.tag_name_id;")
    caseDb = Case.getCurrentCase().getSleuthkitCase()
    dbResult = caseDb.executeQuery(query)
    rows = dbResult.getResultSet()
    while rows.next():
        self.tag_list.append(rows.getString("u_tag_name"))
    dbResult.close()
def check_dervived_existance(self, dataSource, file_name, parent_file_abstract):
    """Return True when NO derived file named file_name exists under the given
    parent.

    NOTE(review): despite the name, True means the file does not yet exist
    (callers use this as "safe to add"). Name kept for interface stability.
    """
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    matches = fileManager.findFiles(dataSource, file_name, parent_file_abstract)
    return len(matches) == 0
def process(self, dataSource, progressBar):
    """Run the bundled img_stat EXE over the first image path via ExecUtil,
    redirecting output to a report file; the process is terminated if the
    ingest job is cancelled, and a partial report is then discarded.

    :returns: IngestModule.ProcessResult.OK in all cases
    """
    # we don't know how much work there will be
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    # Get disk image paths
    imagePaths = dataSource.getPaths()

    # We'll save our output to a file in the reports folder, named based on EXE and data source ID
    reportFile = File(Case.getCurrentCase().getCaseDirectory() + "\\Reports" + "\\img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE, saving output to the report.
    # DataSourceIngestModuleProcessTerminator lets ExecUtil kill the child
    # process when IngestJobContext.dataSourceIngestIsCancelled fires.
    self.log(Level.INFO, "Running program on data source")
    cmd = ArrayList()
    cmd.add(self.pathToEXE.toString())
    cmd.add(imagePaths[0])
    processBuilder = ProcessBuilder(cmd)
    processBuilder.redirectOutput(reportFile)
    ExecUtil.execute(processBuilder, DataSourceIngestModuleProcessTerminator(self.context))

    # Add the report to the case tree unless the job was cancelled mid-run;
    # a cancelled run leaves an incomplete file that must be deleted instead.
    if not self.context.dataSourceIngestIsCancelled():
        Case.getCurrentCase().addReport(reportFile.toString(), "Run EXE", "img_stat output")
    else:
        if reportFile.exists():
            if not reportFile.delete():
                # BUG FIX: the original called self.log(LEVEL.warning, ...)
                # where `LEVEL` is an undefined name (NameError on this path);
                # use the java.util.logging Level enum actually imported here.
                self.log(Level.WARNING, "Error deleting the incomplete report file")
    return IngestModule.ProcessResult.OK
def process(self, file):
    """File-ingest callback: flag files whose name ends in .txt with a
    TSK_INTERESTING_FILE_HIT artifact, index it for search, fire a UI event,
    and read the file contents to demonstrate the content-stream API.

    :returns: IngestModule.ProcessResult.OK
    """
    # Skip non-files (unallocated/unused block pseudo-files, directories).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() == False)):
        return IngestModule.ProcessResult.OK

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For an example, we will flag files with .txt in the name and make a blackboard artifact.
    if file.getName().lower().endswith(".txt"):
        self.log(Level.INFO, "Found a text file: " + file.getName())
        self.filesFound += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a
        # generic type of artifact. Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonFileIngestModuleFactory.moduleName, "Text Files")
        art.addAttribute(att)

        try:
            # index the artifact for keyword search
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there is a new artifact
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(SampleJythonFileIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))

    # For the example (this wouldn't be needed normally), we'll query the blackboard for data that was added
    # by other modules. We then iterate over its attributes. We'll just print them, but you would probably
    # want to do something with them.
    artifactList = file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
    for artifact in artifactList:
        attributeList = artifact.getAttributes()
        for attrib in attributeList:
            self.log(Level.INFO, attrib.toString())

    # To further the example, this code will read the contents of the file and count the number of bytes.
    # BUG FIX: the original bound the read length to the name `len`, shadowing
    # the builtin len() for the rest of the method; renamed to readLen.
    inputStream = ReadContentInputStream(file)
    buffer = jarray.zeros(1024, "b")
    totLen = 0
    readLen = inputStream.read(buffer)
    while (readLen != -1):
        totLen = totLen + readLen
        readLen = inputStream.read(buffer)

    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Write a report containing the number of files created within the last
    two weeks, and register it with the current case."""
    # Two units of work: counting files, then writing the report.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(2)

    # Epoch timestamp of two weeks ago.
    nowSeconds = System.currentTimeMillis() / 1000
    cutoff = nowSeconds - (14 * 24 * 60 * 60)  # (days * hours * minutes * seconds)

    # Query the database for files that meet our criteria.
    recentFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere("crtime > %d" % cutoff)
    fileCount = 0
    for recent in recentFiles:
        # Could do something else here and write it to HTML, CSV, etc.
        fileCount += 1
    progressBar.increment()

    # Write the count to the report file.
    reportPath = os.path.join(baseReportDir, self.getRelativeFilePath())
    with open(reportPath, 'w') as report:
        report.write("file count = %d" % fileCount)

    # Add the report to the Case, so it is shown in the tree.
    Case.getCurrentCase().addReport(reportPath, self.moduleName, "File Count Report")
    progressBar.increment()

    # Call this with ERROR if report was not generated.
    progressBar.complete(ReportStatus.COMPLETE)
def analyze(self, dataSource, fileManager, context):
    """Copy each Tango 'tc.db' database to the case temp directory and parse
    its messages."""
    try:
        for dbFile in fileManager.findFiles(dataSource, "tc.db"):
            try:
                tempCopy = File(Case.getCurrentCase().getTempDirectory(), str(dbFile.getId()) + dbFile.getName())
                ContentUtils.writeToFile(dbFile, tempCopy, context.dataSourceIngestIsCancelled)
                self.__findTangoMessagesInDB(tempCopy.toString(), dbFile, dataSource)
            except Exception as ex:
                # One bad database should not stop the others from parsing.
                self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Tango messages.
        pass
def analyze(self, dataSource, fileManager, context):
    """Copy each candidate call-log database to the case temp directory and
    parse the call logs out of it."""
    try:
        # Database names used across Android versions/vendors.
        logDbs = fileManager.findFiles(dataSource, "logs.db")
        logDbs.addAll(fileManager.findFiles(dataSource, "contacts.db"))
        logDbs.addAll(fileManager.findFiles(dataSource, "contacts2.db"))
        for dbFile in logDbs:
            try:
                tempCopy = File(Case.getCurrentCase().getTempDirectory(), str(dbFile.getId()) + dbFile.getName())
                ContentUtils.writeToFile(dbFile, tempCopy, context.dataSourceIngestIsCancelled)
                self.__findCallLogsInDB(tempCopy.toString(), dbFile, dataSource)
            except IOException as ex:
                self._logger.log(Level.SEVERE, "Error writing temporary call log db to disk", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding call logs.
        pass
def analyze(self, dataSource, fileManager, context):
    """Extract each non-empty CachedGeoposition database and parse its
    geolocation rows."""
    try:
        for dbFile in fileManager.findFiles(dataSource, "CachedGeoposition%.db"):
            # Skip zero-byte placeholders; nothing to parse.
            if dbFile.getSize() == 0:
                continue
            try:
                tempCopy = File(Case.getCurrentCase().getTempDirectory(), str(dbFile.getId()) + dbFile.getName())
                ContentUtils.writeToFile(dbFile, tempCopy, context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(tempCopy.toString(), dbFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing browser location files", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding browser location files.
        pass
def analyze(self, dataSource, fileManager, context):
    """Extract Google Maps 'da_destination_history' files to the case temp
    directory and parse locations out of them."""
    try:
        historyFiles = fileManager.findFiles(dataSource, "da_destination_history")
        if historyFiles.isEmpty():
            return
        for historyFile in historyFiles:
            try:
                tempCopy = File(Case.getCurrentCase().getTempDirectory(), str(historyFile.getId()) + historyFile.getName())
                ContentUtils.writeToFile(historyFile, tempCopy, context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(tempCopy.toString(), historyFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Google map locations.
        pass
def Create_Diskpart_Script(self, size_of_disk, vdisk_name):
    """Write the three diskpart scripts (create/format, mount, unmount) for a
    virtual disk into the case temp directory.

    :param size_of_disk: maximum vdisk size passed to diskpart
    :param vdisk_name: path of the vdisk file the scripts operate on
    :returns: (create_script, unmount_script, mount_script, open_drive)
    """
    # Find the first unused drive letter in D:..Y: to assign to the new volume.
    # BUG FIX: the original only assigned open_drive inside an `except` block,
    # but os.path.exists() never raises for a missing drive, so open_drive was
    # left unbound and the later `assign letter=` write raised NameError.
    # Select the first letter whose root does not exist (this matches the
    # intent of the commented-out list comprehension below).
    open_drive = None
    for x in range(68, 90):
        if not os.path.exists(chr(x) + ":"):
            open_drive = chr(x) + ":"
            break
    #open_drives = [ chr(x) + ": " for x in range(68,90) if not os.path.exists(chr(x) + ":") ]

    vdisk_script_dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "vdisk_scripts")
    try:
        os.mkdir(vdisk_script_dir)
    except:
        # Directory already present from a previous run.
        self.log(Level.INFO, "Vdisk script directory already exists")

    # script names
    vdisk_create_script = os.path.join(vdisk_script_dir, "create_vdisk.txt")
    vdisk_unmount_script = os.path.join(vdisk_script_dir, "unmount_vdisk.txt")
    vdisk_mount_script = os.path.join(vdisk_script_dir, "mount_vdisk.txt")

    # Create create, mount and format script.
    vdc = open(vdisk_create_script, "w")
    vdc.write('create vdisk file="' + vdisk_name + '" maximum=' + str(size_of_disk) + " type=expandable \n")
    vdc.write("attach vdisk \n")
    vdc.write("create partition primary \n")
    vdc.write('format fs=ntfs label="Preview" quick \n')
    # NOTE(review): open_drive carries a trailing ':'; diskpart's documented
    # form is "assign letter=D" — confirm the colon form behaves as intended.
    vdc.write("assign letter=" + open_drive + " \n")
    vdc.close()

    # Create Mount script.
    vdc = open(vdisk_mount_script, "w")
    vdc.write('select vdisk file="' + vdisk_name + '"\n')
    vdc.write("attach vdisk \n")
    vdc.close()

    # Create Unmount script.
    vdc = open(vdisk_unmount_script, "w")
    vdc.write('select vdisk file="' + vdisk_name + '"\n')
    vdc.write("detach vdisk \n")
    vdc.close()

    return vdisk_create_script, vdisk_unmount_script, vdisk_mount_script, open_drive
def check_derived_existance(self, dataSource, file_name, parent_file_path):
    """Return True when a derived file named file_name already exists with
    exactly parent_file_path as its parent; False otherwise."""
    self.log(Level.INFO, "File Name is ==> " + str(file_name) + " <==> Parent File Dir ==> " + str(parent_file_path))
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    matches = fileManager.findFiles(dataSource, file_name, parent_file_path)

    if len(matches) == 0:
        self.log(Level.INFO, "File Does Not Exists ==> " + str(file_name))
        return False

    for candidate in matches:
        self.log(Level.INFO, "File Exists ==> " + str(file_name))
        # Name matches can come from other directories; require the exact parent.
        if parent_file_path == candidate.parentPath:
            self.log(Level.INFO, "File Exists ==> " + str(file_name))
            return True
    self.log(Level.INFO, "File Does Not Exists ==> " + str(file_name))
    return False
def process(self, dataSource, progressBar):
    """Run each Android artifact analyzer over the data source, advancing
    the progress bar per analyzer; always returns ProcessResult.OK."""
    failures = []
    file_manager = Case.getCurrentCase().getServices().getFileManager()
    analyzers = [
        contact.ContactAnalyzer(),
        calllog.CallLogAnalyzer(),
        textmessage.TextMessageAnalyzer(),
        tangomessage.TangoMessageAnalyzer(),
        wwfmessage.WWFMessageAnalyzer(),
        googlemaplocation.GoogleMapLocationAnalyzer(),
        browserlocation.BrowserLocationAnalyzer(),
        cachelocation.CacheLocationAnalyzer(),
    ]
    self.log(Level.INFO, "running " + str(len(analyzers)) + " analyzers")
    progressBar.switchToDeterminate(len(analyzers))

    completed = 0
    for analyzer in analyzers:
        # Bail out quickly if the user cancelled the ingest job.
        if self.context.dataSourceIngestIsCancelled():
            return IngestModule.ProcessResult.OK
        try:
            analyzer.analyze(dataSource, file_manager, self.context)
            completed += 1
            progressBar.progress(completed)
        except Exception as ex:
            failures.append("Error running " + analyzer.__class__.__name__)
            self.log(Level.SEVERE, traceback.format_exc())

    # Build an inbox summary. NOTE(review): nothing below is ever posted —
    # kept only for behavioral parity with the original Java port.
    summary = []
    msg_level = IngestMessage.MessageType.INFO
    if failures:
        msg_level = IngestMessage.MessageType.ERROR
        summary.append("Errors were encountered")
        summary.append("<ul>")
        for msg in failures:
            summary.extend(["<li>", msg, "</li>\n"])
        summary.append("</ul>\n")
        if len(failures) == 1:
            subject = "One error was found"
        else:
            subject = "errors found: " + str(len(failures))
    else:
        summary.append("No errors")
        subject = "No errors"

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest: extract every SRUDB.DAT file, convert each to a
    SQLite database with the bundled external tool, then materialize every
    known SRUDB table as a custom blackboard artifact type with one
    attribute per column.

    dataSource  -- Autopsy Content object being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK in all paths.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "SRUDB.DAT")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create Event Log directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        temp_dir = os.path.join(Temp_Dir, "SRUDB")
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "SRUDB Directory already exists " + Temp_Dir)
    temp_file = ""

    # Write out each Event Log file to the temp directory
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))
        temp_file = lclDbPath

        # Run the executable, saving output to a sqlite database
        # NOTE(review): communicate() blocks until the converter exits;
        # its stdout is discarded. Each run rewrites Temp_Dir\SRUDB.db3.
        self.log(Level.INFO, "Running program on data source parm 1 ==> " + self.path_to_exe + " == > " + temp_file + " Parm 2 ==> " + Temp_Dir + "\SRUDB.db3")
        subprocess.Popen([self.path_to_exe, temp_file, os.path.join(Temp_Dir, "SRUDB.db3")]).communicate()[0]

    for file in files:
        # Open the DB using JDBC
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "SRUDB.db3")
        self.log(Level.INFO, "Path the SRUDB database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        #PSlsit => TSK_PROG_RUN
        #
        # Query the contacts table in the database and get all columns.
        # Only process tables named in the module's allow-list.
        for SR_table_name in self.List_Of_SRUDB:
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('" + SR_table_name + "'); ")
                self.log(Level.INFO, "query SQLite Master table")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Cycle through each row and create artifacts
            while resultSet.next():
                try:
                    self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                    table_name = resultSet.getString("tbl_name")
                    self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                    SQL_String_1 = "Select * from " + table_name + ";"
                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                    #self.log(Level.INFO, SQL_String_1)
                    #self.log(Level.INFO, SQL_String_2)

                    # One custom artifact type per SRUDB table (TSK_<TABLE>).
                    artifact_name = "TSK_" + table_name.upper()
                    artifact_desc = "System Resource Usage " + table_name.upper()
                    try:
                        self.log(Level.INFO, "Begin Create New Artifacts")
                        artID_amc = skCase.addArtifactType(artifact_name, artifact_desc)
                    except:
                        # Type probably already exists from a previous run.
                        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                    artID_sru = skCase.getArtifactTypeID(artifact_name)
                    artID_sru_evt = skCase.getArtifactType(artifact_name)

                    # Discover column names/types via PRAGMA and register one
                    # custom attribute per column (STRING for TEXT/untyped,
                    # LONG for everything else).
                    Column_Names = []
                    Column_Types = []
                    resultSet2 = stmt.executeQuery(SQL_String_2)
                    while resultSet2.next():
                        Column_Names.append(resultSet2.getString("name").upper())
                        Column_Types.append(resultSet2.getString("type").upper())
                        #attID_ex1 = skCase.addAttrType("TSK_" + resultSet2.getString("name").upper(), resultSet2.getString("name"))
                        #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        if resultSet2.getString("type").upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                        elif resultSet2.getString("type").upper() == "":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                    # One artifact per table row; attributes follow the
                    # column typing decided above (getString vs getInt).
                    resultSet3 = stmt.executeQuery(SQL_String_1)
                    while resultSet3.next():
                        art = file.newArtifact(artID_sru)
                        Column_Number = 1
                        for col_name in Column_Names:
                            self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                            self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                            c_name = "TSK_" + col_name
                            self.log(Level.INFO, "Attribute Name is " + c_name + " ")
                            attID_ex1 = skCase.getAttributeType(c_name)
                            if Column_Types[Column_Number - 1] == "TEXT":
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            elif Column_Types[Column_Number - 1] == "":
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            # elif Column_Types[Column_Number - 1] == "BLOB":
                            #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, "BLOBS Not Supported"))
                            # elif Column_Types[Column_Number - 1] == "REAL":
                            #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, resultSet3.getFloat(Column_Number)))
                            else:
                                #self.log(Level.INFO, "Value for column type ==> " + str(resultSet3.getInt(Column_Number)) + " <== ")
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                            Column_Number = Column_Number + 1

                    # Tell the UI new data of this artifact type is available.
                    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseSRUDBIngestModuleFactory.moduleName, artID_sru_evt, None))
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
        # Clean up
        os.remove(lclDbPath)

    #Clean up EventLog directory and files
    for file in files:
        try:
            os.remove(temp_file)
        except:
            self.log(Level.INFO, "removal of SRUDB file failed " + Temp_Dir + "\\" + file.getName())
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of SRUDB directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "System Resourse Usage DB", " SRUDB Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def add_relationship(node1, node2, art, relationship_type, timestamp):
    """Record a communications relationship between two account nodes on the
    current case's CommunicationsManager."""
    comms_manager = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager()
    comms_manager.addRelationships(node1, node2, art, relationship_type, timestamp)
def __findGeoLocationsInDB(self, databasePath, abstractFile):
    """Read cached GPS fixes from the CachedPosition table of a browser
    location SQLite database and post one TSK_GPS_TRACKPOINT artifact per
    row against abstractFile.

    databasePath -- local path to the extracted SQLite DB (no-op if falsy)
    abstractFile -- the AbstractFile the artifacts are attached to
    """
    if not databasePath:
        return

    # Connect via JDBC (this module runs under Jython inside Autopsy's JVM).
    try:
        Class.forName("org.sqlite.JDBC")  # load JDBC driver
        connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
        statement = connection.createStatement()
    except (ClassNotFoundException, SQLException) as ex:
        self._logger.log(Level.SEVERE, "Error connecting to SQL database", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
        return

    try:
        resultSet = statement.executeQuery(
            "SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;"
        )
        while resultSet.next():
            # DB stores epoch milliseconds; artifacts use epoch seconds.
            timestamp = Long.valueOf(resultSet.getString("timestamp")) / 1000
            latitude = Double.valueOf(resultSet.getString("latitude"))
            longitude = Double.valueOf(resultSet.getString("longitude"))

            artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME, "Browser Location History"))
            # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(),moduleName, accuracy)) # NOTE: originally commented out

            try:
                # index the artifact for keyword search
                blackboard = Case.getCurrentCase().getServices().getBlackboard()
                blackboard.indexArtifact(artifact)
            except Blackboard.BlackboardException as ex:
                # Indexing failure is non-fatal: log and notify, keep looping.
                self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactTypeName(), ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
                MessageNotifyUtil.Notify.error(
                    "Failed to index GPS trackpoint artifact for keyword search.",
                    artifact.getDisplayName())
    except Exception as ex:
        self._logger.log(Level.SEVERE, "Error putting artifacts to blackboard", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    finally:
        # Best-effort close; resultSet may be unbound if the query failed.
        try:
            if resultSet is not None:
                resultSet.close()
            statement.close()
            connection.close()
        except Exception as ex:
            self._logger.log(Level.SEVERE, "Error closing database", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
def process(self, dataSource, progressBar):
    """Data-source ingest: extract the SAM and SYSTEM registry hives,
    parse BAM (Background Activity Moderator) entries from SYSTEM and user
    RIDs from SAM, and post one TSK_BAM_KEY artifact per BAM record.

    Returns IngestModule.ProcessResult.OK in all paths.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Hive files to extract
    filesToExtract = ("SAM", "SYSTEM")

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Create BAM directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "bam")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "bam Directory already exists " + temp_dir)

    # Parsed state: SYSTEM hive AbstractFile, rid->username map, BAM rows.
    systemHiveFile = []
    userRids = {}
    bamRecord = []

    for fileName in filesToExtract:
        files = fileManager.findFiles(dataSource, fileName, "Windows/System32/Config")
        numFiles = len(files)
        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Only take hives that live directly in the config directory.
            if file.getParentPath().upper() == '/WINDOWS/SYSTEM32/CONFIG/':
                # Save the hive locally in the temp folder.
                filePath = os.path.join(temp_dir, file.getName())
                ContentUtils.writeToFile(file, File(filePath))

                if file.getName() == 'SYSTEM':
                    # Keep the SYSTEM hive file to hang the artifacts off later.
                    systemHiveFile = file
                    bamRecord = self.processSYSTEMHive(filePath)
                elif file.getName() == 'SAM':
                    # Dictionary keyed by rid with value of user name.
                    userRids = self.processSAMHive(filePath)

    # Setup Artifact type (may already exist from a previous run).
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_ls = skCase.addArtifactType("TSK_BAM_KEY", "BAM Registry Key")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

    artifactName = "TSK_BAM_KEY"
    artId = skCase.getArtifactTypeID(artifactName)
    moduleName = BamKeyIngestModuleFactory.moduleName

    # FIX: the original called blackboard.indexArtifact(artChat) where neither
    # `blackboard` nor `artChat` was defined; resolve the blackboard service
    # here and index the artifact actually created in the loop.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Attributes to use: TSK_USER_NAME, TSK_PROG_NAME, TSK_DATETIME
    for bamRec in bamRecord:
        attributes = ArrayList()
        art = systemHiveFile.newArtifact(artId)
        self.log(Level.INFO, "BamRec ==> " + str(bamRec))
        if bamRec[0] in userRids:
            # Known RID: record the resolved account name.
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), moduleName, userRids[bamRec[0]]))
        else:
            # Unknown RID: fall back to the raw RID string.
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), moduleName, bamRec[0]))
        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), moduleName, bamRec[1]))
        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), moduleName, int(bamRec[2])))
        art.addAttributes(attributes)

        # index the artifact for keyword search
        try:
            blackboard.indexArtifact(art)
        except:
            # FIX: original logged via nonexistent self._logger; use self.log.
            self.log(Level.WARNING, "Error indexing artifact " + art.getDisplayName())

    # Clean up the extracted hive copies.
    try:
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "BamKey", " BamKey Files Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def __findGeoLocationsInFile(self, file, abstractFile):
    """Parse a binary Android location-cache file and post one
    TSK_GPS_TRACKPOINT artifact per decodable entry against abstractFile.

    file         -- local java.io.File copy of the cache file to parse
    abstractFile -- the AbstractFile the artifacts are attached to
    """
    # Scratch buffer re-sized per field; BigInteger interprets it big-endian.
    tempBytes = bytearray([0] * 2)
    inputStream = None
    try:
        inputStream = FileInputStream(file)

        inputStream.read(tempBytes)  # version
        tempBytes = bytearray([0] * 2)
        inputStream.read(tempBytes)  # number of location entries
        iterations = BigInteger(tempBytes).intValue()

        for i in range(iterations):  # loop through every entry
            tempBytes = bytearray([0] * 2)
            inputStream.read(tempBytes)
            tempBytes = bytearray([0])
            inputStream.read(tempBytes)
            # Skip non-important bytes until the start of accuracy (around 7-10 bytes).
            while BigInteger(tempBytes).intValue() != 0:
                if 0 > inputStream.read(tempBytes):
                    break  # we've passed the end of the file, so stop

            tempBytes = bytearray([0] * 3)
            inputStream.read(tempBytes)
            if BigInteger(tempBytes).intValue() <= 0:
                # This refers to a location that could not be calculated;
                # consume the rest of the row and move on.
                tempBytes = bytearray([0] * 28)
                inputStream.read(tempBytes)
                continue
            # FIX: original used Java-style `"" + intValue()`, which raises
            # TypeError in Jython/Python; use str() instead.
            accuracy = str(BigInteger(tempBytes).intValue())

            tempBytes = bytearray([0] * 4)
            inputStream.read(tempBytes)
            confidence = str(BigInteger(tempBytes).intValue())

            tempBytes = bytearray([0] * 8)
            inputStream.read(tempBytes)
            # FIX: original passed the `bytes` builtin instead of the buffer
            # just read, so latitude/longitude were never decoded correctly.
            latitude = CacheLocationAnalyzer.toDouble(tempBytes)

            tempBytes = bytearray([0] * 8)
            inputStream.read(tempBytes)
            longitude = CacheLocationAnalyzer.toDouble(tempBytes)

            tempBytes = bytearray([0] * 8)
            inputStream.read(tempBytes)
            timestamp = BigInteger(tempBytes).longValue() / 1000  # ms -> s

            artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, AndroidAnalyzer.MODULE_NAME, latitude))
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, AndroidAnalyzer.MODULE_NAME, longitude))
            # FIX: `AndroidModuleFactorymodule.Name` was an undefined name;
            # use AndroidAnalyzer.MODULE_NAME like the sibling attributes.
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, AndroidAnalyzer.MODULE_NAME, timestamp))
            artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, AndroidAnalyzer.MODULE_NAME, file.getName() + "Location History"))

            #Not storing these for now.
            # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy))
            # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence))

            try:
                # index the artifact for keyword search
                blackboard = Case.getCurrentCase().getServices().getBlackboard()
                blackboard.indexArtifact(artifact)
            except Blackboard.BlackboardException as ex:
                self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
                MessageNotifyUtil.Notify.error(
                    "Failed to index GPS trackpoint artifact for keyword search.",
                    artifact.getDisplayName())
    except Exception as ex:
        self._logger.log(Level.SEVERE, "Error parsing Cached GPS locations to blackboard", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    finally:
        # FIX: the stream was never closed in the original (resource leak).
        try:
            if inputStream is not None:
                inputStream.close()
        except Exception:
            pass
def generateReport(self, baseReportDir, progressBar):
    """Write a plain-text Snapchat report (profile, contacts, stories,
    feeds, messages) harvested from com.snapchat.android app data, and
    register it with the current case.

    baseReportDir -- directory in which the report file is created
    progressBar   -- ReportProgressPanel; marked COMPLETE on success
    """
    username = ""
    dispname = ""
    phone = ""
    user_id = ""
    friends_array = []
    feeds_array = []
    message_array = []

    # Open the output file.
    fileName = os.path.join(baseReportDir, self.getRelativeFilePath())
    report = open(fileName, 'w')
    report.write("SNAPCHAT REPORT\n")
    report.write("This report harvests and displays the user data and communications from the Snapchat Application\n")
    report.write("-----------------------------------------------------------------------------------------------------------------------\n\n")

    # Query the database for the files (ignore the directories)
    dataSources = Case.getCurrentCase().getDataSources()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    for dataSource in dataSources:
        maindb = fileManager.findFiles(dataSource, 'main.db', 'com.snapchat.android/databases')
        userprefs = fileManager.findFiles(dataSource, 'user_session_shared_pref.xml', 'com.snapchat.android/shared_prefs')

        # Get user details from user_session_shared_pref.xml
        for file in userprefs:
            prefspath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".xml")
            ContentUtils.writeToFile(file, File(prefspath))
            tree = et.parse(prefspath)
            root = tree.getroot()
            for string in root.findall('string'):
                if string.get('name') == "key_display_name":
                    dispname = string.text
                if string.get('name') == "key_username":
                    username = string.text
                if string.get('name') == "key_phone":
                    phone = string.text
                if string.get('name') == "key_user_id":
                    user_id = string.text
            report.write("SNAPCHAT PROFILE.\n")
            # FIX: this write was corrupted in the original source
            # ('"Username: "******"'); restored to emit the harvested
            # username like the other profile fields.
            report.write("Username: " + username + "\n")
            report.write("Display Name: " + dispname + "\n")
            report.write("Registered Phone: " + phone + "\n")
            report.write("User ID: " + user_id + "\n\n")
            report.write("----------------------------------------------\n")

        for file in maindb:
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".db")
            ContentUtils.writeToFile(file, File(lclDbPath))
            try:
                # Open database
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
            except SQLException as e:
                report.write("Can't open db: " + e.getMessage() + "\n")

            # Query DB for friends details, only those that have been added
            try:
                stmt = dbConn.createStatement()
                friendsSet = stmt.executeQuery('SELECT _id, username, userId, displayName, addedTimestamp FROM Friend WHERE addedTimestamp IS NOT NULL;')
                report.write("CONTACTS.\n")
            except SQLException as e:
                report.write("Can't query Friend db: " + e.getMessage() + "\n")
            while friendsSet.next():
                try:
                    f_username = friendsSet.getString('username')
                    f_userid = friendsSet.getString('userId')
                    f_display = friendsSet.getString('displayName')
                    f_addtime = friendsSet.getString('addedTimestamp')
                    if f_addtime is not None:
                        f_addtime = self.timeStampConverter(float(f_addtime))
                    else:
                        f_addtime = "Not added"
                    f_id = friendsSet.getString('_id')
                    # Skip Snapchat's internal pseudo-contact.
                    if f_username != "system_user_id":
                        f = [f_userid, f_username, f_display, f_addtime, f_id]
                        friends_array.append(f)
                except SQLException as e:
                    report.write("Error getting values: " + e.getMessage() + "\n")
            for friend in friends_array:
                report.write(friend[2] + " (" + friend[1] + ") --- " + "ID: " + friend[0] + " --- Added on: " + friend[3] + ".\n")
            stmt.close()

            # Get details of stories posted by self and others
            try:
                stmt = dbConn.createStatement()
                stories = stmt.executeQuery('SELECT username, captionTextDisplay, viewed, expirationTimestamp from StorySnap;')
            except SQLException as e:
                report.write("Can't query story table: " + e.getMessage() + "\n")
            stories_array = []
            while stories.next():
                try:
                    un = stories.getString('username')
                    caption = stories.getString('captionTextDisplay')
                    viewed = stories.getString('viewed')
                    expires = stories.getString('expirationTimestamp')
                    if caption is None:
                        caption = "#NO TEXT#"
                    if viewed == "1":
                        viewed = "has"
                    else:
                        viewed = "has not"
                    stories_array.append([un, caption, viewed, self.timeStampConverter(float(expires))])
                except SQLException as e:
                    report.write("Error getting stories: " + e.getMessage() + "\n")
            report.write("----------------------------------------------\n")
            report.write("Stories\n")
            for story in stories_array:
                report.write(" + " + story[0].capitalize() + " posted a story saying '" + story[1] + "'. -- Expires: " + story[3] + " -- It " + story[2] + " been viewed.\n")
            stmt.close()

            # Query db for list of conversations (feeds)
            try:
                stmt = dbConn.createStatement()
                feedSet = stmt.executeQuery('SELECT _id, key, specifiedName, participantString from Feed;')
            except SQLException as e:
                report.write("Can't query Feed table: " + e.getMessage() + "\n")
            while feedSet.next():
                try:
                    feed_id = feedSet.getString('_id')
                    feedKey = feedSet.getString('key')
                    feedName = feedSet.getString('specifiedName')
                    feedParts = feedSet.getString('participantString')
                    if feedName is None:
                        feedName = "#NO NAME#"
                    if feedParts is None:
                        # Fall back to the participant encoded in the key.
                        feedParts = feedKey.split("~")[1]
                    feeds_array.append([feed_id, feedKey, feedName, feedParts])
                except SQLException as e:
                    report.write("Error getting values: " + e.getMessage() + "\n")
            report.write("----------------------------------------------\n")
            report.write("FEEDS\n")
            for feed in feeds_array:
                report.write("Feed: " + feed[1] + "(" + feed[2] + "). ---- Participants: " + feed[3] + ".\n")
            stmt.close()

            # Query DB for list of messages
            report.write("----------------------------------------------\n")
            report.write("MESSAGES\n")
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery('SELECT timestamp, feedRowId, senderId, type, mediaType, mediaTimerSec, hex(content), savedStates FROM Message;')
            except SQLException as e:
                report.write("Error executing message query " + e.getMessage() + ".\n")
            while resultSet.next():
                try:
                    messageTime = float(resultSet.getString("timestamp"))
                    messageFeedRow = resultSet.getString("feedRowId")
                    messageSender = resultSet.getString("senderId")
                    # Resolve the sender id to a display name where possible.
                    for friend in friends_array:
                        if str(friend[4]) == str(messageSender):
                            messageSender = friend[2] + "(" + friend[1] + ")"
                            break
                    messageType = resultSet.getString("type")
                    messageMedTyp = resultSet.getString("mediaType")
                    messageTimSec = resultSet.getString("mediaTimerSec")
                    messageContent = resultSet.getString("hex(content)")
                    if messageContent is None:
                        messageContent = "No content"
                    else:
                        # Keep only printable ASCII bytes from the hex dump.
                        converted = []
                        conthex = [messageContent[i:i + 2] for i in range(0, len(messageContent), 2)]
                        for char in conthex:
                            h = int(char, 16)
                            if h >= 32 and h <= 126:
                                h = hex(h).replace("0x", "")
                                h = h.decode("hex")
                                converted.append(h)
                        messageContent = ''.join(converted)
                    messageSaved = resultSet.getString("savedStates")
                    message_array.append([messageTime, messageFeedRow, messageSender, messageType, messageMedTyp, messageTimSec, messageContent, messageSaved])
                except SQLException as e:
                    report.write("\n Loop is Buggered \n")
                    break

            for feed in feeds_array:
                report.write("\n -- " + feed[1] + " --\n")
                # Write details for each message in feed
                for message in message_array:
                    if feed[0] == message[1]:
                        if message[3] == "text":
                            # Message is purely text
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent a " + message[3] + " saying '" + message[6] + "'.\n")
                        elif message[3] == "snap":
                            # Message is a "snap" or image
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent a " + message[3] + " .\n")
                        elif message[3] == "erased_message":
                            # Message was erased
                            report.write(self.timeStampConverter(message[0]) + ": From: " + message[2] + ". Messaged erased.\n")
                        elif message[3] == "cognac_close":
                            # User initiated a game
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " played a game. .\n")
                        elif message[3] == "sticker_v3":
                            # Sticker was sent
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent a sticker.\n")
                        elif message[3] == "media_v4":
                            # A form of media was sent
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent a media item.\n")
                        elif message[3] == "audio_note":
                            # A voice recording was sent
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent an audio note.\n")
                        elif message[3] == "welcome_message":
                            # One of Team Snapchat's welcome messages was sent ( Always present )
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent a Welcome Message saying '" + message[6] + "'.\n")
                        elif message[3].endswith("call"):
                            # A form of call interaction occurred.
                            # FIX: the original tested `messageType`, a stale
                            # variable left over from the DB-read loop above;
                            # test this message's own type instead.
                            if message[3] == "joined_call":
                                report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- Joined a call.\n")
                            if message[3] == "left_call":
                                report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- Left a call.\n")
                            if message[3] == "missed_video_call":
                                report.write(self.timeStampConverter(message[0]) + ": --- Missed video call from " + message[2] + ".\n")
                        elif message[3] == "screenshot":
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- took a screenshot of the chat. \n")
                        else:
                            report.write(self.timeStampConverter(message[0]) + ": " + message[2] + " --- sent a " + message[3] + ".\n")

            # Clean up
            stmt.close()
            dbConn.close()
            os.remove(lclDbPath)

    report.close()

    # Add the report to the Case, so it is shown in the tree
    Case.getCurrentCase().addReport(fileName, self.moduleName, "Snapchat TXT")
    progressBar.complete(ReportStatus.COMPLETE)
def process(self, dataSource, progressBar):
    """Data-source ingest: find every *.gpx file, parse it with gpxpy, and
    post GPS track, bookmark (waypoint) and route artifacts.

    dataSource  -- Autopsy Content object being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK in all paths.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Get the case database and its blackboard.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    blackboard = skCase.getBlackboard()

    # Get any files with a .gpx extension.
    # It would perhaps be better to get these files by MIME type instead.
    # RC: It would also be better if this were a file level ingest module so it could process files extracted from archives.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.gpx")

    # Update the progress bar now that we know how much work there is to do.
    numFiles = len(files)
    if self.writeDebugMsgs:
        self.log(Level.INFO, "Found " + str(numFiles) + " GPX files")
    progressBar.switchToDeterminate(numFiles)

    # Get the module name, it will be needed for adding attributes
    moduleName = GPXParserDataSourceIngestModuleFactory.moduleName

    # Check if a folder for this module is present in the case Temp directory.
    # If not, create it.
    dirName = os.path.join(Case.getCurrentCase().getTempDirectory(), "GPX_Parser_Module")
    try:
        os.stat(dirName)
    except:
        os.mkdir(dirName)

    # Create a temp file name. It appears that we cannot close and delete
    # this file, but we can overwrite it for each file we need to process.
    fileName = os.path.join(dirName, "tmp.gpx")

    fileCount = 0
    for file in files:
        # Create a GeoArtifactsHelper for this file.
        geoArtifactHelper = GeoArtifactsHelper(skCase, moduleName, None, file)

        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        if self.writeDebugMsgs:
            self.log(Level.INFO, "Processing " + file.getUniquePath() + " (objID = " + str(file.getId()) + ")")
        fileCount += 1

        # Write the file so that it can be parsed by gpxpy.
        localFile = File(fileName)
        ContentUtils.writeToFile(file, localFile)

        # Send the file to gpxpy for parsing.
        gpxfile = open(fileName)
        try:
            gpx = gpxpy.parse(gpxfile)
            if self.writeDebugMsgs:
                self.log(Level.INFO, "Parsed " + file.getUniquePath() + " (objID = " + str(file.getId()) + ")")
        except Exception as e:
            # Unparseable file: log and move on to the next one.
            self.log(Level.WARNING, "Error parsing file " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + str(e))
            continue

        if gpx:
            # --- Tracks: one TSK track artifact per segment. ---
            if self.writeDebugMsgs:
                self.log(Level.INFO, "Processing tracks from " + file.getUniquePath() + " (objID = " + str(file.getId()) + ")")
            for track in gpx.tracks:
                for segment in track.segments:
                    geoPointList = TskGeoTrackpointsUtil.GeoTrackPointList()
                    for point in segment.points:
                        elevation = 0
                        if point.elevation != None:
                            elevation = point.elevation
                        timeStamp = 0
                        try:
                            if (point.time != None):
                                timeStamp = long(time.mktime(point.time.timetuple()))
                        except Exception as e:
                            self.log(Level.WARNING, "Error getting track timestamp from " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + str(e))
                        geoPointList.addPoint(GeoTrackPoint(point.latitude, point.longitude, elevation, None, 0, 0, 0, timeStamp))
                    try:
                        geoArtifactHelper.addTrack("Track", geoPointList, None)
                    except Blackboard.BlackboardException as e:
                        self.log(Level.SEVERE, "Error posting GPS track artifact for " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + e.getMessage())
                    except TskCoreException as e:
                        self.log(Level.SEVERE, "Error creating GPS track artifact for " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + e.getMessage())

            # --- Waypoints: one TSK_GPS_BOOKMARK artifact each. ---
            if self.writeDebugMsgs:
                self.log(Level.INFO, "Processing waypoints from " + file.getUniquePath() + " (objID = " + str(file.getId()) + ")")
            for waypoint in gpx.waypoints:
                try:
                    art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
                    attributes = ArrayList()
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), moduleName, waypoint.latitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), moduleName, waypoint.longitude))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FLAG.getTypeID(), moduleName, "Waypoint"))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), moduleName, waypoint.name))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), moduleName, "GPXParser"))
                    art.addAttributes(attributes)
                    blackboard.postArtifact(art, moduleName)
                except Blackboard.BlackboardException as e:
                    self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + e.getMessage())
                except TskCoreException as e:
                    self.log(Level.SEVERE, "Error creating GPS bookmark artifact for " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + e.getMessage())

            # --- Routes: one TSK route artifact per GPX route. ---
            if self.writeDebugMsgs:
                self.log(Level.INFO, "Processing routes from " + file.getUniquePath() + " (objID = " + str(file.getId()) + ")")
            for route in gpx.routes:
                geoWaypointList = TskGeoWaypointsUtil.GeoWaypointList()
                for point in route.points:
                    geoWaypointList.addPoint(GeoWaypoint(point.latitude, point.longitude, point.elevation, point.name))
                try:
                    geoArtifactHelper.addRoute(None, None, geoWaypointList, None)
                except Blackboard.BlackboardException as e:
                    # NOTE(review): this call omits the Level argument that
                    # every other self.log call passes — confirm the intended
                    # signature before relying on this log line.
                    self.log("Error posting GPS route artifact for " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + e.getMessage())
                except TskCoreException as e:
                    self.log(Level.SEVERE, "Error creating GPS route artifact for " + file.getUniquePath() + " (objID = " + str(file.getId()) + "):" + e.getMessage())

        # Update the progress bar.
        progressBar.progress(fileCount)

    # Post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, moduleName, "Processed %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point for the SAM parser module.

    Extracts the SAM registry hive(s) found under a "config" directory,
    runs an external parser EXE that writes its output to a SQLite DB
    (SAM.db3), then turns every table/row of that DB into custom TSK_SAM
    blackboard artifacts with one custom attribute per column.

    dataSource  -- data source Content object being ingested
    progressBar -- ingest progress handle supplied by Autopsy
    Returns IngestModule.ProcessResult.OK on every path (errors are logged).
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Case-level handles: SleuthkitCase for artifact/attribute type work,
    # FileManager to locate the SAM hive.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "SAM", "config")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create a SAM working subdirectory in the case temp directory; if it
    # already exists just continue on.
    # NOTE(review): "\S" is not a recognized escape, so "\SAM" keeps a
    # literal backslash -- this path handling is Windows-only by design.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir + "\SAM")
    except:
        # Bare except on purpose: mkdir failing (already exists) is fine.
        self.log(Level.INFO, "SAM Directory already exists " + Temp_Dir)

    # Write out each SAM hive to the temp directory.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the hive locally in the temp folder under its own name.
        lclDbPath = os.path.join(Temp_Dir + "\SAM", file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # The parser ships only as a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Run the EXE over the extracted hive; output goes to <temp>/SAM.db3.
    self.log(
        Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir +
        " Parm 2 ==> " + Temp_Dir + "\\SAM.db3")
    subprocess.Popen([
        self.path_to_exe, Temp_Dir + "\\SAM\\SAM", Temp_Dir + "\\SAM.db3"
    ]).communicate()[0]

    for file in files:
        # Open the DB using JDBC.
        # NOTE(review): lclDbPath is the same SAM.db3 for every iteration,
        # and it is removed at the bottom of this loop -- a second iteration
        # would fail to open it. Works in practice because there is normally
        # exactly one SAM hive per data source; confirm before relying on it.
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                 "SAM.db3")
        #lclDbPath = "C:\\Users\\Forensic_User\\OneDrive\\Code\\Python_Scripts\\SRUDB\SRUDB.DB3"
        self.log(Level.INFO,
                 "Path the SAM database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        #PSlsit => TSK_PROG_RUN
        #
        # Enumerate every table name in the parser's output database.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery(
                "Select tbl_name from SQLITE_MASTER; ")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(
                Level.INFO,
                "Error querying database for SAM table (" + e.getMessage() +
                ")")
            return IngestModule.ProcessResult.OK

        # Create the custom TSK_SAM artifact type. addArtifactType raises if
        # the type already exists, so the except path just logs and we look
        # the type up again below either way.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_sam = skCase.addArtifactType("TSK_SAM", "SAM File")
        except:
            self.log(
                Level.INFO,
                "Artifacts Creation Error, some artifacts may not exist now. ==> "
            )
        artID_sam = skCase.getArtifactTypeID("TSK_SAM")
        artID_sam_evt = skCase.getArtifactType("TSK_SAM")

        # Cycle through each table and create one artifact per data row.
        while resultSet.next():
            try:
                self.log(
                    Level.INFO,
                    "Result (" + resultSet.getString("tbl_name") + ")")
                table_name = resultSet.getString("tbl_name")
                #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                SQL_String_1 = "Select * from " + table_name + ";"
                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                #self.log(Level.INFO, SQL_String_1)
                #self.log(Level.INFO, SQL_String_2)
                Column_Names = []
                Column_Types = []

                # First pass: read the table schema and create one custom
                # attribute type per column ("TSK_" + column name) -- STRING
                # for "text" columns, LONG for everything else. Creation
                # errors (type already exists) are logged and ignored.
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(
                        resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type"))
                    if resultSet2.getString("type") == "text":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType(
                                "TSK_" +
                                resultSet2.getString("name").upper(),
                                BlackboardAttribute.
                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(
                                Level.INFO, "Attributes Creation Error, " +
                                resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType(
                                "TSK_" +
                                resultSet2.getString("name").upper(),
                                BlackboardAttribute.
                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG,
                                resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(
                                Level.INFO, "Attributes Creation Error, " +
                                resultSet2.getString("name") + " ==> ")

                # Second pass: one TSK_SAM artifact per row, one attribute
                # per column. JDBC result-set columns are 1-indexed, hence
                # Column_Number starting at 1 and the "- 1" list index.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_sam)
                    Column_Number = 1
                    for col_name in Column_Names:
                        c_name = "TSK_" + col_name
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "text":
                            art.addAttribute(
                                BlackboardAttribute(
                                    attID_ex1,
                                    ParseSAMIngestModuleFactory.moduleName,
                                    resultSet3.getString(Column_Number)))
                        else:
                            art.addAttribute(
                                BlackboardAttribute(
                                    attID_ex1,
                                    ParseSAMIngestModuleFactory.moduleName,
                                    resultSet3.getInt(Column_Number)))
                        Column_Number = Column_Number + 1
            except SQLException as e:
                self.log(
                    Level.INFO, "Error getting values from contacts table ("
                    + e.getMessage() + ")")

        # Clean up JDBC resources and the parser output database.
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

    # Clean up the extracted SAM files and the working directory.
    for file in files:
        try:
            os.remove(Temp_Dir + "\\SAM\\" + file.getName())
        except:
            self.log(
                Level.INFO, "removal of SAM file failed " + Temp_Dir + "\\" +
                file.getName())
    try:
        os.rmdir(Temp_Dir + "\\SAM")
    except:
        self.log(Level.INFO, "removal of SAM directory failed " + Temp_Dir)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParseSAMIngestModuleFactory.moduleName,
                        artID_sam_evt, None))

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "SAM Parser",
                                          " SAM Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParseSAMIngestModuleFactory.moduleName,
                        artID_sam_evt, None))

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point for the Windows Prefetch parser module.

    Extracts all *.pf files to a temp directory, runs an external parser
    EXE that writes its results into a SQLite DB (Autopsy_PF_DB.db3), then
    posts one custom TSK_PREFETCH artifact per parsed prefetch file, with
    run count and up to 8 embedded execution timestamps.

    dataSource  -- data source Content object being ingested
    progressBar -- ingest progress handle supplied by Autopsy
    Returns IngestModule.ProcessResult.OK on every path (errors are logged).
    """

    # Check to see if the artifacts exist and if not then create it, also
    # check to see if the attributes exist and if not then create them.
    # addArtifactType/addArtifactAttributeType raise when the type already
    # exists, so every creation is wrapped in a log-and-continue except.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_pf = skCase.addArtifactType("TSK_PREFETCH", "Windows Prefetch")
    except:
        self.log(
            Level.INFO,
            "Artifacts Creation Error, some artifacts may not exist now. ==> "
        )
        artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH")

    # Create the attribute type, if it exists then catch the error.
    try:
        attID_pf_fn = skCase.addArtifactAttributeType(
            "TSK_PREFETCH_FILE_NAME",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "Prefetch File Name")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Prefetch File Name. ==> ")
    try:
        attID_pf_an = skCase.addArtifactAttributeType(
            "TSK_PREFETCH_ACTUAL_FILE_NAME",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "Actual File Name")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Actual File Name. ==> ")
    try:
        attID_nr = skCase.addArtifactAttributeType(
            "TSK_PF_RUN_COUNT",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "Program Number Runs")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Program Number Runs. ==> ")
    # Eight DATETIME attributes, one per embedded execution timestamp slot.
    try:
        attID_ex1 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_1", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 1")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 1. ==> ")
    try:
        attID_ex2 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_2", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 2")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 2. ==> ")
    try:
        attID_ex3 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_3", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 3")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 3. ==> ")
    try:
        attID_ex4 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_4", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 4")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 4 ==> ")
    try:
        attID_ex5 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_5", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 5")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 5. ==> ")
    try:
        attID_ex6 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_6", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 6")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 6. ==> ")
    try:
        attID_ex7 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_7", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 7")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 7. ==> ")
    try:
        attID_ex8 = skCase.addArtifactAttributeType(
            "TSK_PF_EXEC_DTTM_8", BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "PF Execution DTTM 8")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 8 ==> ")

    self.log(Level.INFO, "Get Artifacts after they were created.")
    # Re-fetch every type handle so we have valid IDs whether the types were
    # just created above or already existed from a previous run.
    artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH")
    artID_pf_evt = skCase.getArtifactType("TSK_PREFETCH")
    attID_pf_fn = skCase.getAttributeType("TSK_PREFETCH_FILE_NAME")
    attID_pf_an = skCase.getAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME")
    attID_nr = skCase.getAttributeType("TSK_PF_RUN_COUNT")
    attID_ex1 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_1")
    attID_ex2 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_2")
    attID_ex3 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_3")
    attID_ex4 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_4")
    attID_ex5 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_5")
    attID_ex6 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_6")
    attID_ex7 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_7")
    attID_ex8 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_8")

    # Maps on-disk temp file name -> abstract file, used to cross-reference
    # ADS prefetch files back to their source after the EXE runs.
    prefetchFileName = {}

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Find the prefetch files from the /windows/prefetch folder.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.pf")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create prefetch working directory in the case temp directory; if it
    # already exists then continue on processing.
    Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(),
                            "Prefetch_Files")
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir)
    except:
        self.log(Level.INFO,
                 "Prefetch Directory already exists " + Temp_Dir)

    # Write out each prefetch file to the temp directory.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # ':' appears in alternate-data-stream prefetch names but is not a
        # legal on-disk filename character, so swap it for '-'. Either way,
        # remember which abstract file the on-disk name came from.
        fileName = file.getName()
        if (":" in fileName):
            fileName = fileName.replace(":", "-")
            prefetchFileName[fileName] = file
        else:
            prefetchFileName[fileName] = file

        lclDbPath = os.path.join(Temp_Dir, fileName)
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the EXE over the whole directory, saving output to a sqlite database.
    self.log(
        Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir +
        " Parm 2 ==> " + Case.getCurrentCase().getTempDirectory())
    subprocess.Popen([
        self.path_to_exe, Temp_Dir,
        os.path.join(Temp_Dir, "Autopsy_PF_DB.db3")
    ]).communicate()[0]

    # Set the database to be read to the one created by the prefetch parser
    # program.
    lclDbPath = os.path.join(Temp_Dir, "Autopsy_PF_DB.db3")
    self.log(Level.INFO,
             "Path the prefetch database file created ==> " + lclDbPath)

    # Open the DB using JDBC.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(
            Level.INFO, "Could not open database file (not SQLite) " +
            file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the prefetch_file_info table produced by the parser EXE.
    try:
        stmt = dbConn.createStatement()
        resultSet = stmt.executeQuery(
            "Select prefetch_File_Name, actual_File_Name, Number_time_file_run, "
            + " Embeded_date_Time_Unix_1, " + " Embeded_date_Time_Unix_2, " +
            " Embeded_date_Time_Unix_3, " + " Embeded_date_Time_Unix_4, " +
            " Embeded_date_Time_Unix_5, " + " Embeded_date_Time_Unix_6, " +
            " Embeded_date_Time_Unix_7, " + " Embeded_date_Time_Unix_8 " +
            " from prefetch_file_info ")
    except SQLException as e:
        self.log(
            Level.INFO,
            "Error querying database for Prefetch table (" + e.getMessage() +
            ")")
        return IngestModule.ProcessResult.OK

    # Cycle through each row and create artifacts.
    while resultSet.next():
        try:
            self.log(
                Level.INFO,
                "Result (" + resultSet.getString("Prefetch_File_Name") + ")")
            Prefetch_File_Name = resultSet.getString("Prefetch_File_Name")
            Actual_File_Name = resultSet.getString("Actual_File_Name")
            Number_Of_Runs = resultSet.getString("Number_Time_File_Run")
            # Embedded run timestamps, read as ints for DATETIME attributes.
            # Presumably Unix epoch seconds (column names say "Unix") --
            # TODO confirm against the parser EXE's output format.
            Time_1 = resultSet.getInt("Embeded_date_Time_Unix_1")
            Time_2 = resultSet.getInt("Embeded_date_Time_Unix_2")
            Time_3 = resultSet.getInt("Embeded_date_Time_Unix_3")
            Time_4 = resultSet.getInt("Embeded_date_Time_Unix_4")
            Time_5 = resultSet.getInt("Embeded_date_Time_Unix_5")
            Time_6 = resultSet.getInt("Embeded_date_Time_Unix_6")
            Time_7 = resultSet.getInt("Embeded_date_Time_Unix_7")
            Time_8 = resultSet.getInt("Embeded_date_Time_Unix_8")
        except SQLException as e:
            self.log(
                Level.INFO, "Error getting values from contacts table (" +
                e.getMessage() + ")")

        # Map the parsed prefetch name back to the abstract file we saved.
        file = prefetchFileName[Prefetch_File_Name]

        # Make artifact for TSK_PREFETCH, this can happen when custom
        # attributes are fully supported.
        #art = file.newArtifact(artID_pf)
        art = file.newArtifact(artID_pf)
        #self.log(Level.INFO, "Attribute Number ==>" + str(attID_pf_fn) + " " + str(attID_pf_an) )

        # Add the attributes to the artifact.
        art.addAttributes(
            ((BlackboardAttribute(attID_pf_fn,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  file.getName())),
             (BlackboardAttribute(attID_pf_an,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Actual_File_Name)),
             (BlackboardAttribute(attID_nr,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Number_Of_Runs)),
             (BlackboardAttribute(attID_ex1,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_1)),
             (BlackboardAttribute(attID_ex2,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_2)),
             (BlackboardAttribute(attID_ex3,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_3)),
             (BlackboardAttribute(attID_ex4,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_4)),
             (BlackboardAttribute(attID_ex5,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_5)),
             (BlackboardAttribute(attID_ex6,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_6)),
             (BlackboardAttribute(attID_ex7,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_7)),
             (BlackboardAttribute(attID_ex8,
                                  ParsePrefetchDbIngestModuleFactory.moduleName,
                                  Time_8))))

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName,
                        artID_pf_evt, None))

    # Clean up the JDBC resources and the parser output database.
    try:
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)
    except:
        self.log(Level.INFO,
                 "could not remove the prefetch database " + lclDbPath)

    # Clean up prefetch directory and files.
    # NOTE(review): ADS files were saved with ':' replaced by '-', but this
    # removal uses the original file.getName() -- those temp copies are left
    # behind (the failure is logged) and then rmdir fails on the non-empty
    # directory. Confirm whether that is acceptable.
    for file in files:
        try:
            os.remove(os.path.join(Temp_Dir, file.getName()))
        except:
            self.log(
                Level.INFO, "removal of prefetch file failed " + Temp_Dir +
                "\\" + file.getName())
    try:
        os.rmdir(Temp_Dir)
    except:
        self.log(Level.INFO,
                 "removal of prefetch directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Prefetch Analyzer",
                                          " Prefetch Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName,
                        artID_pf_evt, None))

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point for the Windows File History parser module.

    Finds File History catalog ESE databases (*edb under
    /Windows/FileHistory/), converts each to SQLite with an external helper
    program, and posts one TSK_FH_CATALOG_1 or TSK_FH_CATALOG_2 artifact per
    row of the resulting file_history table, indexed for keyword search.

    dataSource  -- data source Content object being ingested
    progressBar -- ingest progress handle supplied by Autopsy
    Returns IngestModule.ProcessResult.OK on every path (errors are logged).
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Check to see if the artifacts exist and if not then create it, also
    # check to see if the attributes exist and if not then create them.
    skCase = Case.getCurrentCase().getSleuthkitCase()

    # This will work in 4.0.1 and beyond
    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Custom artifact types -- one per catalog file. addArtifactType raises
    # if the type already exists; the except path looks up the existing ID.
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_cat1 = skCase.addArtifactType("TSK_FH_CATALOG_1",
                                            "File History Catalog 1")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, Catalog 1. ==> ")
        artID_cat1 = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_cat2 = skCase.addArtifactType("TSK_FH_CATALOG_2",
                                            "File History Catalog 2")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, Catalog 2. ==> ")
        artID_cat2 = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")

    # Create the attribute type, if it exists then catch the error.
    try:
        attID_fh_pn = skCase.addArtifactAttributeType(
            'TSK_FH_PATH',
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "Parent Path")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Prefetch Parent Path. ==> ")
    try:
        attID_fh_fn = skCase.addArtifactAttributeType(
            'TSK_FH_FILE_NAME',
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fh_fs = skCase.addArtifactAttributeType(
            'TSK_FH_FILE_SIZE',
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")
    try:
        attID_fh_usn = skCase.addArtifactAttributeType(
            'TSK_FH_USN_JOURNAL_ENTRY',
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
            "USN Journal Entry")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, USN Journal Entry. ==> ")
    try:
        attID_fh_fc = skCase.addArtifactAttributeType(
            'TSK_FH_FILE_CREATED', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Created")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, File Created. ==> ")
    try:
        attID_fh_fm = skCase.addArtifactAttributeType(
            'TSK_FH_FILE_MODIFIED', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Modified")
    except:
        # NOTE(review): log message looks copy-pasted from the prefetch
        # module; kept byte-identical since it is runtime output.
        self.log(Level.INFO,
                 "Attributes Creation Error, PF Execution DTTM 3. ==> ")
    try:
        attID_fh_bq = skCase.addArtifactAttributeType(
            'TSK_FH_BACKUP_QUEUED', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Queued")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Backup Queued ==> ")
    try:
        attID_fh_bc = skCase.addArtifactAttributeType(
            'TSK_FH_BACKUP_CREATED', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Created")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Backup Created ==> ")
    try:
        attID_fh_bcp = skCase.addArtifactAttributeType(
            'TSK_FH_BACKUP_CAPTURED', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Captured")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Backup Captured. ==> ")
    try:
        attID_fh_bu = skCase.addArtifactAttributeType(
            'TSK_FH_BACKUP_UPDATED', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Updated")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Backup Updated. ==> ")
    try:
        attID_fh_bv = skCase.addArtifactAttributeType(
            'TSK_FH_BACKUP_VISIBLE', BlackboardAttribute.
            TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Visible")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Backup Visible ==> ")

    self.log(Level.INFO, "Get Artifacts after they were created.")
    # Re-fetch attribute handles so we have valid IDs whether the types were
    # just created above or already existed from a previous run.
    #artID_wfh = skCase.getArtifactTypeID("TSK_PREFETCH")
    #artID_cat1 = skCase.getArtifactType("TSK_FH_CATALOG_1")
    #artID_cat2 = skCase.getArtifactType("TSK_FH_CATALOG_2")
    attID_fh_pn = skCase.getAttributeType("TSK_FH_PATH")
    attID_fh_fn = skCase.getAttributeType("TSK_FH_FILE_NAME")
    attID_fh_fs = skCase.getAttributeType("TSK_FH_FILE_SIZE")
    attID_fh_usn = skCase.getAttributeType("TSK_FH_USN_JOURNAL_ENTRY")
    attID_fh_fc = skCase.getAttributeType("TSK_FH_FILE_CREATED")
    attID_fh_fm = skCase.getAttributeType("TSK_FH_FILE_MODIFIED")
    attID_fh_bq = skCase.getAttributeType("TSK_FH_BACKUP_QUEUED")
    attID_fh_bc = skCase.getAttributeType("TSK_FH_BACKUP_CREATED")
    attID_fh_bcp = skCase.getAttributeType("TSK_FH_BACKUP_CAPTURED")
    attID_fh_bu = skCase.getAttributeType("TSK_FH_BACKUP_UPDATED")
    attID_fh_bv = skCase.getAttributeType("TSK_FH_BACKUP_VISIBLE")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Find the file history catalog databases under the users' folders.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%edb",
                                  "%/Windows/FileHistory/%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create file history directory in temp directory, if it exists then
    # continue on processing.
    Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(),
                            "File_History")
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir)
    except:
        self.log(Level.INFO,
                 "File_History Directory already exists " + Temp_Dir)

    # Write out each catalog esedb database to the temp directory and
    # process it.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the EDB locally; the object ID suffix reduces name collisions.
        lclDbPath = os.path.join(Temp_Dir,
                                 file.getName() + "_" + str(file.getId()))
        db_name = os.path.splitext(file.getName())[0]
        lclSQLPath = os.path.join(
            Temp_Dir, db_name + "_" + str(file.getId()) + ".db3")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the converter EXE, saving output to a sqlite database. The
        # non-Windows branch passes the module directory as an extra
        # argument to the helper.
        if PlatformUtil.isWindowsOS():
            self.log(
                Level.INFO, "Running program on data source parm 1 ==> " +
                self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([self.path_to_exe, lclDbPath, lclSQLPath],
                         stdout=PIPE,
                         stderr=PIPE)
        else:
            self.log(
                Level.INFO, "Running program on data source parm 1 ==> " +
                self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([
                self.path_to_exe, lclDbPath, lclSQLPath,
                os.path.dirname(os.path.abspath(__file__))
            ],
                         stdout=PIPE,
                         stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Catalog1 and Catalog2 rows get their own artifact types.
        if db_name == "Catalog1":
            artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
            artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_1")
        else:
            artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")
            artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_2")

        # Derive the account name from the file's parent path.
        # NOTE(review): username[2] assumes a path shaped like
        # /Users/<name>/... -- confirm this holds for all data sources.
        userpath = file.getParentPath()
        username = userpath.split('/')
        self.log(Level.INFO, "Getting Username " + username[2])

        # Open the converted DB using JDBC.
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                 lclSQLPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                lclSQLPath + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the file_history table; the column aliases deliberately
        # match the TSK attribute names used below.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = "Select ParentName 'TSK_FH_PATH', Childname 'TSK_FH_FILE_NAME', " + \
                            "Filesize 'TSK_FH_FILE_SIZE', " + \
                            "usn 'TSK_FH_USN_JOURNAL_ENTRY', " + \
                            "FileCreated 'TSK_FH_FILE_CREATED', filemodified 'TSK_FH_FILE_MODIFIED', " + \
                            "tqueued 'TSK_FH_BACKUP_QUEUED', tcreated 'TSK_FH_BACKUP_CREATED', " + \
                            "tcaptured 'TSK_FH_BACKUP_CAPTURED', tupdated 'TSK_FH_BACKUP_UPDATED', " + \
                            "tvisible 'TSK_FH_BACKUP_VISIBLE' from file_history"
            self.log(Level.INFO, "SQL Statement --> " + SQL_Statement)
            resultSet = stmt.executeQuery(SQL_Statement)
        except SQLException as e:
            self.log(
                Level.INFO,
                "Error querying database for File_History table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts.
        while resultSet.next():
            try:
                #self.log(Level.INFO, "Result (" + resultSet.getString("Prefetch_File_Name") + ")")
                FH_Path = resultSet.getString("TSK_FH_PATH")
                FH_File_Name = resultSet.getString("TSK_FH_FILE_NAME")
                FH_Filesize = resultSet.getString("TSK_FH_FILE_SIZE")
                FH_Usn = resultSet.getString("TSK_FH_USN_JOURNAL_ENTRY")
                # Timestamp columns read as ints to feed DATETIME attributes.
                FH_FC = resultSet.getInt("TSK_FH_FILE_CREATED")
                FH_FM = resultSet.getInt("TSK_FH_FILE_MODIFIED")
                FH_BQ = resultSet.getInt("TSK_FH_BACKUP_QUEUED")
                FH_BC = resultSet.getInt("TSK_FH_BACKUP_CREATED")
                FH_BCP = resultSet.getInt("TSK_FH_BACKUP_CAPTURED")
                FH_BU = resultSet.getInt("TSK_FH_BACKUP_UPDATED")
                FH_BV = resultSet.getInt("TSK_FH_BACKUP_VISIBLE")
            except SQLException as e:
                self.log(
                    Level.INFO, "Error getting values from contacts table ("
                    + e.getMessage() + ")")

            # Make the catalog artifact and attach all attributes, plus the
            # standard TSK_USER_NAME derived from the path above.
            art = file.newArtifact(artID_fh)
            # Add the attributes to the artifact.
            art.addAttributes(
                ((BlackboardAttribute(attID_fh_pn,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_Path)),
                 (BlackboardAttribute(attID_fh_fn,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_File_Name)),
                 (BlackboardAttribute(attID_fh_fs,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_Filesize)),
                 (BlackboardAttribute(attID_fh_usn,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_Usn)),
                 (BlackboardAttribute(attID_fh_fc,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_FC)),
                 (BlackboardAttribute(attID_fh_fm,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_FM)),
                 (BlackboardAttribute(attID_fh_bq,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_BQ)),
                 (BlackboardAttribute(attID_fh_bc,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_BC)),
                 (BlackboardAttribute(attID_fh_bcp,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_BCP)),
                 (BlackboardAttribute(attID_fh_bu,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_BU)),
                 (BlackboardAttribute(attID_fh_bv,
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      FH_BV)),
                 (BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(),
                                      ParseFileHistoryIngestModuleFactory.moduleName,
                                      username[2]))))

            try:
                #index the artifact for keyword search
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE,
                         "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there are new
        # artifacts for this catalog.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseFileHistoryIngestModuleFactory.moduleName,
                            artID_fh_evt, None))

        # Clean up the JDBC resources for this catalog.
        stmt.close()
        dbConn.close()
        #os.remove(lclDbPath)

    # Clean up the whole working directory tree.
    try:
        shutil.rmtree(Temp_Dir)
    except:
        self.log(Level.INFO,
                 "removal of directory tree failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Windows File History Parser",
        " Windows File History Has Been Parsed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Webcache parser.

    Extracts every WebcacheV01.dat from the data source, converts each to a
    SQLite database with an external Windows EXE (self.path_to_exe), then
    turns every row of every container table into a blackboard artifact
    (one dynamically-created artifact type per container).

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: progress reporter for this ingest job.

    Returns:
        IngestModule.ProcessResult.OK (errors are logged, not raised).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "WebcacheV01.dat")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create Event Log directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        # NOTE(review): "\W" is a literal backslash + W (not a valid escape),
        # so this path form is Windows-only -- consistent with the OS check below.
        os.mkdir(Temp_Dir + "\Webcache")
    except:
        # Directory already present from a previous run -- not an error.
        self.log(Level.INFO, "Webcache Directory already exists " + Temp_Dir)

    # Write out each Event Log file to the temp directory
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(Temp_Dir + "\Webcache", file.getName() + "-" + str(file.getId()))
        DbPath = os.path.join(Temp_Dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, file.getName() + ' ==> ' + str(file.getId()) + ' ==> ' + file.getUniquePath())
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE, saving output to a sqlite database
        self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + " Parm 2 ==> " + Temp_Dir + "\WebcacheV01.db3")
        # communicate() blocks until the external converter finishes.
        subprocess.Popen([self.path_to_exe, lclDbPath, DbPath]).communicate()[0]

    # Example has only a Windows EXE, so bail if we aren't on Windows
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    for file in files:
        # Open the DB using JDBC
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, "Path the Webcache database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        #PSlsit => TSK_PROG_RUN
        #
        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select distinct container_name from all_containers;")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Collect container names first; the same Statement is reused below.
        Container_List = []
        while resultSet.next():
            Container_List.append(resultSet.getString("container_name"))
        #self.log(Level.INFO, "Number of containers ==> " + str(len(Container_List)) + " ==> " + str(Container_List))

        # Cycle through each row and create artifacts
        for c_name in Container_List:
            try:
                container_name = c_name
                # NOTE(review): container_name is concatenated straight into the
                # SQL; tolerated here only because it came from this same DB.
                SQL_String_1 = "Select * from all_containers where container_name = '" + container_name + "';"
                SQL_String_2 = "PRAGMA table_info('All_Containers')"
                # One artifact type per container, named after the container.
                artifact_name = "TSK_WC_" + container_name.upper()
                artifact_desc = "WebcacheV01 " + container_name.upper()
                try:
                    self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_web = skCase.addArtifactType(artifact_name, artifact_desc)
                except:
                    # Type already exists (earlier run/container); look it up below.
                    self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                artID_web = skCase.getArtifactTypeID(artifact_name)
                artID_web_evt = skCase.getArtifactType(artifact_name)

                Column_Names = []
                Column_Types = []
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type").upper())
                    # One attribute type per column: TEXT and untyped columns
                    # become STRING attributes, everything else becomes LONG.
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    elif resultSet2.getString("type").upper() == "":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One artifact per data row; attributes added column by column.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_web)
                    Column_Number = 1
                    for col_name in Column_Names:
                        c_name = "TSK_" + col_name
                        # NOTE(review): the first lookup is immediately overwritten
                        # by the second; the getAttrTypeID result is never used.
                        attID_ex1 = skCase.getAttrTypeID(c_name)
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        elif Column_Types[Column_Number - 1] == "":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        # elif Column_Types[Column_Number - 1] == "BLOB":
                        #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, "BLOBS Not Supported"))
                        # elif Column_Types[Column_Number - 1] == "REAL":
                        #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, resultSet3.getFloat(Column_Number)))
                        else:
                            #self.log(Level.INFO, "Value for column type ==> " + str(resultSet3.getInt(Column_Number)) + " <== ")
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                        Column_Number = Column_Number + 1

                # Tell the UI new data of this artifact type is available.
                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseWebcacheIngestModuleFactory.moduleName, artID_web_evt, None))
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

    # Clean up
    #stmt.close()
    #dbConn.close()
    #os.remove(lclDbPath)

    #Clean up EventLog directory and files
    for file in files:
        try:
            os.remove(Temp_Dir + "\\Webcache\\" + file.getName() + "-" + str(file.getId()))
            os.remove(Temp_Dir + "\\" + file.getName() + "-" + str(file.getId()) + ".db3")
        except:
            self.log(Level.INFO, "removal of Webcache file failed " + Temp_Dir + "\\" + file.getName() + "-" + str(file.getId()))
    try:
        os.rmdir(Temp_Dir + "\\Webcache")
    except:
        self.log(Level.INFO, "removal of Webcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Webcache Parser", " Webcache Has Been Parsed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Generic SQLite-database ingest.

    For each database name in self.List_Of_DBs, extracts every matching file
    from the data source, opens it over JDBC, and converts every row of every
    table/view into a blackboard artifact -- creating one artifact type per
    table and one attribute type per column on the fly.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: progress reporter for this ingest job.

    Returns:
        IngestModule.ProcessResult.OK (all errors are logged and skipped).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    for SQLite_DB in self.List_Of_DBs:
        files = fileManager.findFiles(dataSource, SQLite_DB)
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        for file in files:
            # Open the DB using JDBC
            #lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), SQLite_DB)
            # file id in the name reduces collisions between identically-named DBs.
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()))
            ContentUtils.writeToFile(file, File(lclDbPath))
            #self.log(Level.INFO, "Path the prefetch database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
                self.log(Level.INFO, "Database ==> " + file.getName())
            except SQLException as e:
                # NOTE(review): the failure is logged but processing continues;
                # the commented-out return below suggests it once bailed here.
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + "-" + str(file.getId()) + " (" + e.getMessage() + ")")
                #return IngestModule.ProcessResult.OK

            # Query the contacts table in the database and get all columns.
            try:
                # Separate statements so the nested queries don't invalidate
                # the outer SQLITE_MASTER result set.
                stmt = dbConn.createStatement()
                stmt2 = dbConn.createStatement()
                stmt3 = dbConn.createStatement()
                stmt4 = dbConn.createStatement()
                resultSet = stmt.executeQuery("Select tbl_name, type from SQLITE_MASTER where type in ('table','view');")
                #self.log(Level.INFO, "query SQLite Master table")
                #self.log(Level.INFO, "query " + str(resultSet))

                # Cycle through each row and create artifacts
                while resultSet.next():
                    try:
                        self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                        table_name = resultSet.getString("tbl_name")
                        object_type = resultSet.getString("type")
                        resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet.getString("tbl_name") + " ")
                        row_count = resultSet4.getInt("NumRows")
                        self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ")
                        # Skip empty tables/views entirely.
                        if row_count >= 1:
                            #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                            SQL_String_1 = "Select * from " + table_name + ";"
                            SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                            # Artifact type is derived from DB name + table name.
                            artifact_name = "TSK_" + SQLite_DB.upper() + "_" + table_name.upper()
                            artifact_desc = "SQLite Database " + SQLite_DB.upper() + " " + object_type.title() + " " + table_name.upper()
                            try:
                                #self.log(Level.INFO, "Begin Create New Artifacts")
                                artID_sql = skCase.addArtifactType(artifact_name, artifact_desc)
                            except:
                                # Type already exists; fall through to the lookups.
                                self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                            artID_sql = skCase.getArtifactTypeID(artifact_name)
                            artID_sql_evt = skCase.getArtifactType(artifact_name)

                            Column_Names = []
                            Column_Types = []
                            resultSet2 = stmt2.executeQuery(SQL_String_2)
                            while resultSet2.next():
                                Column_Names.append(resultSet2.getString("name").upper())
                                Column_Types.append(resultSet2.getString("type").upper())
                                attribute_name = "TSK_" + SQLite_DB + "_" + table_name.upper() + "_" + resultSet2.getString("name").upper()
                                # Declared type decides the attribute value type:
                                # text-like (incl. untyped and BLOB) -> STRING, numeric -> LONG.
                                if resultSet2.getString("type").upper() == "TEXT":
                                    try:
                                        attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                    except:
                                        self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                elif resultSet2.getString("type").upper() == "LONGVARCHAR":
                                    try:
                                        attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                    except:
                                        self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                elif resultSet2.getString("type").upper() == "":
                                    try:
                                        attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                    except:
                                        self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                elif resultSet2.getString("type").upper() == "BLOB":
                                    try:
                                        attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                    except:
                                        self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                elif resultSet2.getString("type").upper() == "REAL":
                                    try:
                                        attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                    except:
                                        self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                else:
                                    try:
                                        attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                    except:
                                        self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")

                            # One artifact per data row.
                            resultSet3 = stmt3.executeQuery(SQL_String_1)
                            while resultSet3.next():
                                art = file.newArtifact(artID_sql)
                                Column_Number = 1
                                for col_name in Column_Names:
                                    c_name = "TSK_" + SQLite_DB + "_" + table_name.upper() + "_" + Column_Names[Column_Number - 1]
                                    attID_ex1 = skCase.getAttributeType(c_name)
                                    if Column_Types[Column_Number - 1] == "TEXT":
                                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseSQLiteDBIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                    elif Column_Types[Column_Number - 1] == "":
                                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseSQLiteDBIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                    elif Column_Types[Column_Number - 1] == "LONGVARCHAR":
                                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseSQLiteDBIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                    elif Column_Types[Column_Number - 1] == "BLOB":
                                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseSQLiteDBIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                    elif Column_Types[Column_Number - 1] == "REAL":
                                        # NOTE(review): REAL values are truncated to long here.
                                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseSQLiteDBIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number))))
                                    else:
                                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseSQLiteDBIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                                    Column_Number = Column_Number + 1

                            # Tell the UI new data of this artifact type is available.
                            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseSQLiteDBIngestModuleFactory.moduleName, \
                                artID_sql_evt, None))
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from table " + resultSet.getString("tbl_name") + " (" + e.getMessage() + ")")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")")
                #return IngestModule.ProcessResult.OK

    # Clean up
    #stmt.close()
    #dbConn.close()

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "SQLite Database Parser", " SQLite Databases have been parsed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def add_account_type(accountTypeName, displayName):
    """Create (or fetch, if it already exists) a custom communications
    account type in the current case's database.

    Args:
        accountTypeName: unique internal name for the account type.
        displayName: human-readable name shown in the Autopsy UI.

    Returns:
        The Account.Type returned by CommunicationsManager.addAccountType.
    """
    communication_manager = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager()
    # Fixed: call the method on the manager instance instead of routing the
    # unbound method through the CommunicationsManager class with an explicit
    # instance argument -- identical behavior, idiomatic and less fragile.
    return communication_manager.addAccountType(accountTypeName, displayName)
def __findContactsInDB(self, contactDb, dataSource):
    """Parse an Android contacts database and add one contact artifact per
    person, merging consecutive phone/email rows for the same display name.

    Args:
        contactDb: an AppSQLiteDB wrapper for contacts.db (may be falsy -> no-op).
        dataSource: the data source the DB came from (unused directly here).
    """
    if not contactDb:
        return
    try:
        current_case = Case.getCurrentCaseThrows()

        # Create a helper to parse the DB
        contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                       self._PARSER_NAME,
                                                       contactDb.getDBFile(),
                                                       Account.Type.PHONE)

        # get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
        # sorted by name, so phonenumber/email would be consecutive for a person if they exist.
        # check if contacts.name_raw_contact_id exists. Modify the query accordingly.
        columnFound = contactDb.columnExists("contacts", "name_raw_contact_id")
        if columnFound:
            resultSet = contactDb.runQuery(
                "SELECT mimetype, data1, name_raw_contact.display_name AS display_name \n"
                + "FROM raw_contacts JOIN contacts ON (raw_contacts.contact_id=contacts._id) \n"
                + "JOIN raw_contacts AS name_raw_contact ON(name_raw_contact_id=name_raw_contact._id) "
                + "LEFT OUTER JOIN data ON (data.raw_contact_id=raw_contacts._id) \n"
                + "LEFT OUTER JOIN mimetypes ON (data.mimetype_id=mimetypes._id) \n"
                + "WHERE mimetype = 'vnd.android.cursor.item/phone_v2' OR mimetype = 'vnd.android.cursor.item/email_v2'\n"
                + "ORDER BY name_raw_contact.display_name ASC;")
        else:
            resultSet = contactDb.runQuery(
                "SELECT mimetype, data1, raw_contacts.display_name AS display_name \n"
                + "FROM raw_contacts JOIN contacts ON (raw_contacts.contact_id=contacts._id) \n"
                + "LEFT OUTER JOIN data ON (data.raw_contact_id=raw_contacts._id) \n"
                + "LEFT OUTER JOIN mimetypes ON (data.mimetype_id=mimetypes._id) \n"
                + "WHERE mimetype = 'vnd.android.cursor.item/phone_v2' OR mimetype = 'vnd.android.cursor.item/email_v2'\n"
                + "ORDER BY raw_contacts.display_name ASC;")

        # Rows are name-sorted, so a person's phone and email rows are adjacent;
        # accumulate them until the display name changes, then emit one contact.
        contactArtifact = None
        oldName = None
        phoneNumber = None
        emailAddr = None
        name = None
        while resultSet.next():
            name = resultSet.getString("display_name")
            data1 = resultSet.getString("data1")  # the phone number or email
            mimetype = resultSet.getString("mimetype")  # either phone or email
            if oldName and (name != oldName):
                if phoneNumber or emailAddr:
                    contactArtifact = contactDbHelper.addContact(oldName,
                                                                 phoneNumber,  # phoneNumber,
                                                                 None,  # homePhoneNumber,
                                                                 None,  # mobilePhoneNumber,
                                                                 emailAddr)  # emailAddr
                    # Reset accumulators for the next person.
                    oldName = name
                    phoneNumber = None
                    emailAddr = None
                    name = None

            if mimetype == "vnd.android.cursor.item/phone_v2":
                phoneNumber = data1
            else:
                emailAddr = data1

            if name:
                oldName = name

        # create contact for last row
        if oldName and (phoneNumber or emailAddr):
            contactArtifact = contactDbHelper.addContact(oldName,
                                                         phoneNumber,  # phoneNumber,
                                                         None,  # homePhoneNumber,
                                                         None,  # mobilePhoneNumber,
                                                         emailAddr)  # emailAddr

    except SQLException as ex:
        self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)
        self._logger.log(Level.WARNING, traceback.format_exc())
    except TskCoreException as ex:
        self._logger.log(Level.SEVERE, "Failed to add Android message artifacts.", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    except BlackboardException as ex:
        self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
        self._logger.log(Level.WARNING, traceback.format_exc())
    except NoCurrentCaseException as ex:
        self._logger.log(Level.WARNING, "No case currently open.", ex)
        self._logger.log(Level.WARNING, traceback.format_exc())
    finally:
        contactDb.close()
def process(self, dataSource, progressBar):
    """Sample data-source ingest: flag every file whose name contains "test"
    with a TSK_INTERESTING_FILE_HIT artifact and read its content to count bytes.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: progress reporter for this ingest job.

    Returns:
        IngestModule.ProcessResult.OK.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # This will work in 4.0.1 and beyond
    # Use blackboard class to index blackboard artifacts for keyword search
    # blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For our example, we will use FileManager to get all files with the word
    # "test" in the name and then count and read them.
    # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/3.1/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%test%")

    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0  # fixed: dropped stray C-style semicolon

    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard.  TSK_INTERESTING_FILE_HIT is a
        # generic type of artifact.  Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonDataSourceIngestModuleFactory.moduleName,
                                  "Test file")
        art.addAttribute(att)

        # This will work in 4.0.1 and beyond
        #try:
        #    # index the artifact for keyword search
        #    blackboard.indexArtifact(art)
        #except Blackboard.BlackboardException as e:
        #    self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # To further the example, read the file in 1 KiB chunks and count the bytes.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        readLen = inputStream.read(buffer)
        while readLen != -1:  # -1 signals end-of-stream (java.io.InputStream contract)
            totLen = totLen + readLen
            readLen = inputStream.read(buffer)

        # Update the progress bar
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK  # fixed: dropped stray trailing semicolon
def get_or_create_account(account_type, file, uniqueid):
    """Create an account file instance of *account_type* for *uniqueid*,
    attached to the data source that *file* belongs to.

    Args:
        account_type: the Account.Type to instantiate.
        file: the abstract file the account was found in.
        uniqueid: the unique account identifier string.

    Returns:
        The account file instance created by the communications manager.
    """
    case_db = Case.getCurrentCase().getSleuthkitCase()
    comms_manager = case_db.getCommunicationsManager()
    data_source = file.getDataSource()
    return comms_manager.createAccountFileInstance(account_type, uniqueid, "test", data_source)
def process(self, dataSource, progressBar):
    """Extract every *.log file from the data source and scan its printable
    text for WhatsApp activity markers, posting one artifact per match.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: progress reporter for this ingest job.

    Returns:
        IngestModule.ProcessResult.OK.
    """
    # Amount of work is unknown until the files have been found.
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find files named contacts.db, regardless of parent path
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, r"%.log")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "WhatsApp_Parse")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass  # directory already exists from a previous run

    # Table-driven dispatch: (marker substring, value extractor,
    # artifact type, artifact desc, attribute type, attribute desc).
    # Checks are independent, so a single line may match several markers,
    # exactly as the original if-chain behaved.
    scan_rules = [
        ("action,presence",  self.online_status,  "TSK_ONLINE_STATUS", "Online Status",        "TSK_ONLINE_STATUS",   "status"),
        ("action,chatstate", self.chat_state,     "TSK_CHAT_STATE",    "Chat State",           "TSK_CHAT_STATE_NAME", "state"),
        ("action,message",   self.get_message,    "TSK_MSG",           "Sent Media",           "TSK_MSG_TYPE",        "msg type"),
        ("action,msgs",      self.delete,         "TSK_MSG_DELETE",    "Message Deleted",      "TSK_DELETE",          "action"),
        ("action,block",     self.block,          "TSK_BLOCKED",       "Contacts Blocked",     "TSK_IF_BLOCKED",      "if blocked"),
        ("action,battery",   self.battery,        "TSK_BATTERY",       "Battery Percent",      "TSK_BATTERY_PERCENT", "battery"),
        ("action,group",     self.group_action,   "TSK_GROUP",         "Group creation",       "TSK_GRP_ACTION",      "action"),
        ("action,status",    self.get_status,     "TSK_STATUS_INFO",   "Status/Story Actions", "TSK_STATUS_ACTION",   "action"),
        ("action,chat,",     self.get_read_msg,   "TSK_READ_INFO",     "Reading Message",      "TSK_READ",            "action"),
        ("Media:sendToChat", self.get_send_media, "TSK_MSG_SEND",      "Sending Message",      "TSK_SENT",            "action"),
        ("action,msg,relay", self.get_rcv_media,  "TSK_RCV_MEDIA",     "Receiving Message",    "TSK_RCV",             "action"),
    ]

    for file in files:
        # Bail out promptly if the user cancelled the ingest job.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1

        # Copy the log out of the image so it can be read as a normal file.
        extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))

        for line in self.find_printable(extractedFile):
            for marker, extract_value, art_type, art_desc, att_type, att_desc in scan_rules:
                if marker in line:
                    self.insert_art_att(file, art_type, art_desc, att_type, att_desc, extract_value(line), line)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "WhatsAppParse Analyzer",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def analyze(self, dataSource, fileManager, context):
    """Parse every mmssms.db found for this package and add one message
    artifact per SMS row, via CommunicationArtifactsHelper.

    Args:
        dataSource: the data source to search for message databases.
        fileManager: file manager service (unused directly here).
        context: ingest job context (unused directly here).
    """
    # NOTE(review): selfAccountId is never assigned after this, so the
    # IMO-account branch below is effectively dead -- confirm intent.
    selfAccountId = None
    messageDbs = AppSQLiteDB.findAppDatabases(dataSource, "mmssms.db", True, self._PACKAGE_NAME)
    for messageDb in messageDbs:
        try:
            current_case = Case.getCurrentCaseThrows()
            if selfAccountId is not None:
                messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                               self._PARSER_NAME,
                                                               messageDb.getDBFile(),
                                                               Account.Type.PHONE,
                                                               Account.Type.IMO,
                                                               selfAccountId)
            else:
                messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
                                                               self._PARSER_NAME,
                                                               messageDb.getDBFile(),
                                                               Account.Type.PHONE)
            # Per-DB UUID prefix keeps thread ids unique across databases.
            uuid = UUID.randomUUID().toString()
            messagesResultSet = messageDb.runQuery("SELECT address, date, read, type, subject, body, thread_id FROM sms;")
            if messagesResultSet is not None:
                while messagesResultSet.next():
                    direction = ""
                    address = None
                    fromId = None
                    toId = None
                    address = messagesResultSet.getString("address")  # may be phone number, or other addresses
                    # "date" is epoch millis stored as text; convert to seconds.
                    timeStamp = Long.valueOf(messagesResultSet.getString("date")) / 1000
                    read = messagesResultSet.getInt("read")  # may be unread = 0, read = 1
                    subject = messagesResultSet.getString("subject")  # message subject
                    msgBody = messagesResultSet.getString("body")  # message body
                    thread_id = "{0}-{1}".format(uuid, messagesResultSet.getInt("thread_id"))
                    # sms.type 1 = received; everything else treated as sent.
                    if messagesResultSet.getString("type") == "1":
                        direction = CommunicationDirection.INCOMING
                        fromId = address
                    else:
                        direction = CommunicationDirection.OUTGOING
                        toId = address
                    message_read = messagesResultSet.getInt("read")  # may be unread = 0, read = 1
                    if (message_read == 1):
                        msgReadStatus = MessageReadStatus.READ
                    elif (message_read == 0):
                        msgReadStatus = MessageReadStatus.UNREAD
                    else:
                        msgReadStatus = MessageReadStatus.UNKNOWN

                    ## add a message
                    if address is not None:
                        messageArtifact = messageDbHelper.addMessage(self._MESSAGE_TYPE,
                                                                     direction,
                                                                     fromId,
                                                                     toId,
                                                                     timeStamp,
                                                                     msgReadStatus,
                                                                     subject,  # subject
                                                                     msgBody,
                                                                     thread_id)
        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE, "Failed to add Android message artifacts.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        finally:
            messageDb.close()
def _create_attribute_type(self, skCase, type_name, value_type, display_name):
    # Helper: create a custom blackboard attribute type, tolerating the
    # "already exists" error raised on every run after the first.
    # (Replaces 14 copy/pasted try/except blocks in the original.)
    try:
        skCase.addArtifactAttributeType(type_name, value_type, display_name)
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, " + display_name + ". ==> ")

def process(self, dataSource, progressBar):
    """Export the selected Windows .evtx files, run the bundled parser EXE over
    them (producing EventLogs.db3), then turn each parsed event row into a
    TSK_EVTX_LOGS artifact plus per-event-id TSK_EVTX_LOGS_LONG count
    artifacts for long-tail analysis.

    Fixes over the original:
      * fallback lookup for the long-tail artifact type used "TSK_EVTX_LOGS"
        instead of "TSK_EVTX_LOGS_LONG";
      * SQL built by string concatenation replaced with PreparedStatement
        (file names can contain quotes);
      * JDBC statements are closed every loop iteration (only the last pair
        was closed before);
      * "Event Identiifier" display-name typo corrected.
    """
    self.log(
        Level.INFO,
        "List Of Events ==> " + str(self.List_Of_Events) +
        " <== Number of Events ==> " + str(len(self.List_Of_Events)))
    if len(self.List_Of_Events) < 1:
        # Nothing selected in the module settings panel -> error out early.
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "ParseEvtx",
            " No Event Logs Selected to Parse ")
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR

    skCase = Case.getCurrentCase().getSleuthkitCase()
    # NOTE(review): this transaction is never committed or rolled back in the
    # original module either; kept for compatibility but it looks unnecessary.
    skCase_Tran = skCase.beginTransaction()

    # Create (or fall back to looking up) the two custom artifact types.
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_evtx = skCase.addArtifactType("TSK_EVTX_LOGS",
                                            "Windows Event Logs")
    except:
        self.log(
            Level.INFO,
            "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_evtx_Long = skCase.addArtifactType(
            "TSK_EVTX_LOGS_LONG", "Windows Event Logs Long Tail Analysis")
    except:
        self.log(
            Level.INFO,
            "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        # BUG FIX: the original looked up "TSK_EVTX_LOGS" here.
        artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_LONG")

    # Create the custom attribute types (no-ops after the first run).
    # NOTE: "QUALIFERS" is misspelled in the stored type name but must stay
    # that way for compatibility with existing cases.
    STRING = BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
    LONG = BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
    self._create_attribute_type(skCase, "TSK_EVTX_FILE_NAME", STRING, "Event Log File Name")
    self._create_attribute_type(skCase, "TSK_EVTX_RECOVERED_RECORD", STRING, "Recovered Record")
    self._create_attribute_type(skCase, "TSK_EVTX_COMPUTER_NAME", STRING, "Computer Name")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_IDENTIFIER", LONG, "Event Identifier")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS", STRING, "Event Identifier Qualifiers")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_LEVEL", STRING, "Event Level")
    self._create_attribute_type(skCase, "TSK_EVTX_OFFSET_IN_FILE", STRING, "Event Offset In File")
    self._create_attribute_type(skCase, "TSK_EVTX_IDENTIFIER", STRING, "Identifier")
    self._create_attribute_type(skCase, "TSK_EVTX_SOURCE_NAME", STRING, "Source Name")
    self._create_attribute_type(skCase, "TSK_EVTX_USER_SECURITY_ID", STRING, "User Security ID")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_TIME", STRING, "Event Time")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_TIME_EPOCH", STRING, "Event Time Epoch")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_DETAIL_TEXT", STRING, "Event Detail")
    self._create_attribute_type(skCase, "TSK_EVTX_EVENT_ID_COUNT", LONG, "Event Id Count")

    # Look everything back up so we always use the stored definitions.
    artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
    artID_evtx_evt = skCase.getArtifactType("TSK_EVTX_LOGS")
    artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_LONG")
    artID_evtx_Long_evt = skCase.getArtifactType("TSK_EVTX_LOGS_LONG")
    attID_ev_fn = skCase.getAttributeType("TSK_EVTX_FILE_NAME")
    attID_ev_rc = skCase.getAttributeType("TSK_EVTX_RECOVERED_RECORD")
    attID_ev_cn = skCase.getAttributeType("TSK_EVTX_COMPUTER_NAME")
    attID_ev_ei = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER")
    attID_ev_eiq = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS")
    attID_ev_el = skCase.getAttributeType("TSK_EVTX_EVENT_LEVEL")
    attID_ev_oif = skCase.getAttributeType("TSK_EVTX_OFFSET_IN_FILE")
    attID_ev_id = skCase.getAttributeType("TSK_EVTX_IDENTIFIER")
    attID_ev_sn = skCase.getAttributeType("TSK_EVTX_SOURCE_NAME")
    attID_ev_usi = skCase.getAttributeType("TSK_EVTX_USER_SECURITY_ID")
    attID_ev_et = skCase.getAttributeType("TSK_EVTX_EVENT_TIME")
    attID_ev_ete = skCase.getAttributeType("TSK_EVTX_EVENT_TIME_EPOCH")
    attID_ev_dt = skCase.getAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT")
    attID_ev_cnt = skCase.getAttributeType("TSK_EVTX_EVENT_ID_COUNT")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Collect the event log files to process.
    files = []
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    if self.List_Of_Events[0] == 'ALL':
        files = fileManager.findFiles(dataSource, "%.evtx")
    else:
        for eventlog in self.List_Of_Events:
            files.extend(fileManager.findFiles(dataSource, eventlog))

    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Scratch directory the helper EXE reads the exported logs from.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "EventLogs")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO,
                 "Event Log Directory already exists " + temp_dir)

    # Export every selected event log into the scratch directory.
    # NOTE(review): exporting by name means two logs with the same name
    # clobber each other; the original comment suggests the file id was
    # meant to be used instead — confirm before changing the EXE contract.
    for file in files:
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the parser EXE once over the whole directory; it writes EventLogs.db3.
    self.log(
        Level.INFO,
        "Running program on data source " + self.path_to_exe +
        " parm 1 ==> " + temp_dir + " Parm 2 ==> " +
        os.path.join(Temp_Dir, "EventLogs.db3"))
    subprocess.Popen([
        self.path_to_exe, temp_dir,
        os.path.join(Temp_Dir, "EventLogs.db3")
    ]).communicate()[0]

    # Open the database the EXE just produced via JDBC.
    lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                             "EventLogs.db3")
    self.log(Level.INFO,
             "Path to the Eventlogs database file created ==> " + lclDbPath)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(
            Level.INFO, "Could not open database file (not SQLite) " +
            file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Create artifacts for each exported log's rows.
    # (The original re-queried the file manager for the identical file list
    # here; that redundant second lookup is dropped.)
    for file in files:
        file_name = file.getName()
        self.log(Level.INFO,
                 "File To process in SQL " + file_name + " <<=====")
        try:
            # Parameterized query: file names can contain quote characters.
            stmt = dbConn.prepareStatement(
                "SELECT File_Name, Recovered_Record, Computer_name, Event_Identifier, "
                "Event_Identifier_Qualifiers, Event_Level, Event_offset, Identifier, "
                "Event_source_Name, Event_User_Security_Identifier, Event_Time, "
                "Event_Time_Epoch, Event_Detail_Text FROM Event_Logs "
                "WHERE upper(File_Name) = upper(?)")
            stmt.setString(1, file_name)
            resultSet = stmt.executeQuery()
        except SQLException as e:
            self.log(
                Level.INFO, "Error querying database for EventLogs table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # One detailed artifact per parsed event record.
        while resultSet.next():
            try:
                Computer_Name = resultSet.getString("Computer_Name")
                Event_Identifier = resultSet.getInt("Event_Identifier")
                Event_Level = resultSet.getString("Event_Level")
                Event_Source_Name = resultSet.getString("Event_Source_Name")
                Event_User_Security_Identifier = resultSet.getString(
                    "Event_User_Security_Identifier")
                Event_Time = resultSet.getString("Event_Time")
                Event_Detail_Text = resultSet.getString("Event_Detail_Text")
            except SQLException as e:
                self.log(
                    Level.INFO, "Error getting values from contacts table (" +
                    e.getMessage() + ")")
            art = file.newArtifact(artID_evtx)
            art.addAttributes(((BlackboardAttribute(attID_ev_cn, ParseEvtxDbIngestModuleFactory.moduleName, Computer_Name)),
                               (BlackboardAttribute(attID_ev_ei, ParseEvtxDbIngestModuleFactory.moduleName, Event_Identifier)),
                               (BlackboardAttribute(attID_ev_el, ParseEvtxDbIngestModuleFactory.moduleName, Event_Level)),
                               (BlackboardAttribute(attID_ev_sn, ParseEvtxDbIngestModuleFactory.moduleName, Event_Source_Name)),
                               (BlackboardAttribute(attID_ev_usi, ParseEvtxDbIngestModuleFactory.moduleName, Event_User_Security_Identifier)),
                               (BlackboardAttribute(attID_ev_et, ParseEvtxDbIngestModuleFactory.moduleName, Event_Time)),
                               (BlackboardAttribute(attID_ev_dt, ParseEvtxDbIngestModuleFactory.moduleName, Event_Detail_Text))))

        # Long-tail analysis: one artifact per (event id, count) pair.
        try:
            stmt_1 = dbConn.prepareStatement(
                "select event_identifier, file_name, count(*) 'Number_Of_Events' "
                "FROM Event_Logs where upper(File_Name) = upper(?) "
                "group by event_identifier, file_name order by 3;")
            stmt_1.setString(1, file_name)
            resultSet_1 = stmt_1.executeQuery()
        except SQLException as e:
            self.log(
                Level.INFO, "Error querying database for EventLogs table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        while resultSet_1.next():
            try:
                Event_Identifier = resultSet_1.getInt("Event_Identifier")
                Event_ID_Count = resultSet_1.getInt("Number_Of_Events")
            except SQLException as e:
                self.log(
                    Level.INFO, "Error getting values from contacts table (" +
                    e.getMessage() + ")")
            art_1 = file.newArtifact(artID_evtx_Long)
            art_1.addAttributes(((BlackboardAttribute(attID_ev_ei, ParseEvtxDbIngestModuleFactory.moduleName, Event_Identifier)),
                                 (BlackboardAttribute(attID_ev_cnt, ParseEvtxDbIngestModuleFactory.moduleName, Event_ID_Count))))

        # Close per-file statements (the original leaked all but the last pair).
        stmt.close()
        stmt_1.close()

        # Notify the UI that new artifacts exist for this log.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseEvtxDbIngestModuleFactory.moduleName,
                            artID_evtx_evt, None))
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseEvtxDbIngestModuleFactory.moduleName,
                            artID_evtx_Long_evt, None))

    # Clean up the scratch database and exported logs.
    dbConn.close()
    os.remove(lclDbPath)
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(
                Level.INFO, "removal of Event Log file failed " +
                os.path.join(temp_dir, file.getName()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO,
                 "removal of Event Logs directory failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "ParseEvtx",
        " Event Logs have been parsed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run the configured Volatility plugins against every file in the data
    source root that has a local path (memory images added as local files).

    Fixes over the original:
      * os.makedirs() creates the missing 'Volatility' parent directory
        (os.mkdir silently failed, swallowed by the bare except);
      * the misleading "Plaso Import Directory" log text is corrected;
      * the four near-identical Popen branches are collapsed into a single
        command-list builder; 'is None' replaces '== None'.
    """
    self.log(
        Level.INFO,
        "Starting to process, Just before call to parse_safari_history")
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Dump directory for plugin output lives under the module output dir.
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    dump_file = os.path.join(Temp_Dir, "Volatility", "Dump-Files")
    try:
        os.makedirs(dump_file)
    except:
        self.log(Level.INFO,
                 "Volatility Dump-Files directory already exists " + dump_file)

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # All files in the root directory of the data source.
    files = fileManager.findFiles(dataSource, "%", "/")
    numFiles = len(files)
    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

    for file in files:
        self.log(Level.INFO, "File name to process is ==> " + str(file))
        self.log(
            Level.INFO,
            "File name to process is ==> " + str(file.getLocalAbsPath()))
        image_file = file.getLocalAbsPath()
        if image_file is None:
            # Not a local file — nothing Volatility can read; skip it.
            continue

        if self.isAutodetect:
            self.find_profile(image_file)
            # Autodetection failed: cannot run any plugin on this image.
            if self.Profile is None:
                continue

        for plugin_to_run in self.Plugins:
            # Build the command line once instead of four copy/paste branches.
            cmd = []
            if self.Python_Program:
                # Volatility is a .py script — run it via the interpreter.
                cmd.append("Python.exe")
            cmd.extend([
                self.Volatility_Executable, "-f", image_file,
                "--profile=" + self.Profile, "--dump-dir=" + dump_file
            ])
            if self.isProcessIds:
                cmd.append("--pid=" + self.Process_Ids_To_Dump)
            cmd.append(plugin_to_run)
            self.log(Level.INFO, "Running program ==> " + " ".join(cmd))
            pipe = Popen(cmd, stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "VolatilitySettings",
        " VolatilitySettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def __findTextsInDB(self, databasePath, abstractFile, dataSource):
    """Open the exported SMS database at databasePath, create one TSK_MESSAGE
    artifact per row of its 'sms' table, and relate each message to a
    'Device' account (this data source) and a PHONE account (the remote
    address).

    Collected artifacts are posted to the blackboard in the finally block, so
    rows parsed before an error still get posted; DB handles are closed
    best-effort there as well.
    """
    if not databasePath:
        return
    bbartifacts = list()
    try:
        Class.forName("org.sqlite.JDBC")  # load JDBC driver
        connection = DriverManager.getConnection("jdbc:sqlite:" +
                                                 databasePath)
        statement = connection.createStatement()
    except (ClassNotFoundException) as ex:
        self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
        return
    except (SQLException) as ex:
        # Error opening database.
        return

    # Create a 'Device' account using the data source device id
    datasourceObjId = dataSource.getDataSource().getId()
    ds = Case.getCurrentCase().getSleuthkitCase().getDataSource(
        datasourceObjId)
    deviceID = ds.getDeviceId()
    deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase(
    ).getCommunicationsManager().createAccountFileInstance(
        Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)
    # Random prefix keeps thread ids unique across databases.
    uuid = UUID.randomUUID().toString()
    resultSet = None
    try:
        resultSet = statement.executeQuery(
            "SELECT address, date, read, type, subject, body, thread_id FROM sms;"
        )
        while resultSet.next():
            address = resultSet.getString(
                "address")  # may be phone number, or other addresses
            # 'date' is stored as a string of ms since epoch; convert to seconds.
            date = Long.valueOf(resultSet.getString("date")) / 1000
            read = resultSet.getInt("read")  # may be unread = 0, read = 1
            subject = resultSet.getString("subject")  # message subject
            body = resultSet.getString("body")  # message body
            thread_id = "{0}-{1}".format(uuid,
                                         resultSet.getInt("thread_id"))
            attributes = ArrayList()
            artifact = abstractFile.newArtifact(
                BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE)
            #create Message artifact and then add attributes from result set.
            # type "1" == received message; anything else treated as sent.
            if resultSet.getString("type") == "1":
                attributes.add(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DIRECTION,
                        general.MODULE_NAME, "Incoming"))
                attributes.add(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.
                        TSK_PHONE_NUMBER_FROM, general.MODULE_NAME, address))
            else:
                attributes.add(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DIRECTION,
                        general.MODULE_NAME, "Outgoing"))
                attributes.add(
                    BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.
                        TSK_PHONE_NUMBER_TO, general.MODULE_NAME, address))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME,
                    general.MODULE_NAME, date))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_READ_STATUS,
                    general.MODULE_NAME, Integer(read)))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SUBJECT,
                    general.MODULE_NAME, subject))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT,
                    general.MODULE_NAME, body))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE,
                    general.MODULE_NAME, "SMS Message"))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_THREAD_ID,
                    general.MODULE_NAME, thread_id))
            artifact.addAttributes(attributes)
            # Create an account for the remote party.
            msgAccountInstance = Case.getCurrentCase().getSleuthkitCase(
            ).getCommunicationsManager().createAccountFileInstance(
                Account.Type.PHONE, address, general.MODULE_NAME,
                abstractFile)
            # create relationship between accounts
            Case.getCurrentCase().getSleuthkitCase(
            ).getCommunicationsManager().addRelationships(
                deviceAccountInstance, [msgAccountInstance], artifact,
                Relationship.Type.MESSAGE, date)
            bbartifacts.append(artifact)
    except SQLException as ex:
        # Unable to execute text messages SQL query against database.
        # Deliberately swallowed: a database without an 'sms' table is normal.
        pass
    except Exception as ex:
        self._logger.log(Level.SEVERE,
                         "Error parsing text messages to blackboard", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    finally:
        # Post whatever was collected, then best-effort close of DB handles.
        if bbartifacts:
            Case.getCurrentCase().getSleuthkitCase().getBlackboard(
            ).postArtifacts(bbartifacts, general.MODULE_NAME)
        try:
            if resultSet is not None:
                resultSet.close()
            statement.close()
            connection.close()
        except Exception as ex:
            # Error closing database.
            pass
except: continue except: #message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Discord Analyzer", "In Except") #IngestServices.getInstance().postMessage(message) continue return artifact def process(self,dataSource,progressBar): #setting up case case = Case.getCurrentCase().getSleuthkitCase() temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Discord_Parse") try: os.mkdir(temporaryDirectory) except: pass #setup artiact and attributes artID = self.create_artifact_type(case,"TSK_Discord","Discord cache") attID_username = self.create_attribute_type(case, "TSK_Discord_Username", "Username") attID_id = self.create_attribute_type(case, "TSK_Discord_ID", "Discord ID") attID_disc = self.create_attribute_type(case, "TSK_Discord_Discriminator", "Discriminator") attID_timestamp = self.create_attribute_type(case, "TSK_Discord_Timestamp", "Timestamp") attID_message = self.create_attribute_type(case, "TSK_Discord_Message", "Message") attID_channel_id = self.create_attribute_type(case, "TSK_Discord_Channel_ID", "Channel_ID")
def process(self, dataSource, progressBar):
    """Find Google Drive sync_config.db files, read the account email out of
    each, then parse the matching snapshot.db to emit one custom blackboard
    artifact per synced file/directory (full path, modified time, size in
    KiB, shared flag, and type).

    Fixes over the original:
      * 'ngestServices' NameError typo on the cancellation path;
      * attID_ex1 is refreshed via getAttributeType before use — the original
        fetched TSK_GDRIVE_PARENT into attID_ex0 but then used attID_ex1,
        which is undefined on any run after the attribute types already exist.
    """
    msgcounter = 0
    global mama
    progressBar.switchToIndeterminate()
    global ccase
    ccase = Case.getCurrentCase().getSleuthkitCase()
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    accounts = fileManager.findFiles(dataSource, "sync_config.db")
    numFiles = len(accounts)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in accounts:
        fileCount += 1
        progressBar.progress(fileCount)
        progressBar.progress("Google Drive Analyzer")
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        # Export the config db and check the SQLite magic header
        # (first 15 bytes of a SQLite file are "SQLite format 3").
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                 str(file.getId()) + ".db")
        ContentUtils.writeToFile(file, File(lclDbPath))
        binary_file = open(lclDbPath, "rb")
        data = binary_file.read(15)
        binary_file.close()
        if str(data) == "SQLite format 3":
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" %
                                                     lclDbPath)
            except SQLException as e:
                message = IngestMessage.createMessage(
                    IngestMessage.MessageType.DATA,
                    "Initial config database:",
                    "Cannot open " + file.getName() + " as SQLite",
                    file.getName() + " not a database")
                #IngestServices.getInstance().postMessage(message)
                return IngestModule.ProcessResult.ERROR
            try:
                stmt = dbConn.createStatement()
                stmt2 = dbConn.createStatement()
                stmt3 = dbConn.createStatement()
                stmt4 = dbConn.createStatement()
                stmt5 = dbConn.createStatement()
                try:
                    resultSet1 = stmt.executeQuery(
                        "select data_value 'account' from data WHERE entry_key='user_email';"
                    )
                    resultSet2 = stmt2.executeQuery(
                        "select count(*) 'count' from data WHERE entry_key='user_email';"
                    )
                    resultSet4 = stmt4.executeQuery(
                        "select data_value from data where data_key ='rowkey';"
                    )
                    # NOTE(review): values are read without calling next()
                    # first; this relies on driver behaviour — confirm against
                    # the sqlite-jdbc version shipped with Autopsy.
                    GAccount = resultSet1.getString("account")
                    no_of_accounts = resultSet2.getInt("count")
                    if no_of_accounts > 0:
                        gBase = fileManager.findFiles(dataSource,
                                                      "snapshot.db",
                                                      file.getParentPath())
                        # One custom artifact type per Drive account.
                        artifact_name = "TSK_MSG_" + GAccount
                        artifact_desc = "Google Drive Account: " + GAccount
                        try:
                            artID_Gdrive = ccase.addArtifactType(
                                artifact_name, artifact_desc)
                        except:
                            # Type already exists from an earlier run.
                            pass
                        artID_Gdrive = ccase.getArtifactTypeID(artifact_name)
                        artID_Gdrive_evt = ccase.getArtifactType(
                            artifact_name)
                        for gDatabase in gBase:
                            # Only use the snapshot.db next to this config db.
                            if str(file.getParentPath()) in str(gDatabase):
                                lclDbPath2 = os.path.join(
                                    Case.getCurrentCase().getTempDirectory(),
                                    str(GAccount) + ".db")
                                ContentUtils.writeToFile(
                                    gDatabase, File(lclDbPath2))
                                binary_file = open(lclDbPath2, "rb")
                                data = binary_file.read(15)
                                binary_file.close()
                                if str(data) == "SQLite format 3":
                                    try:
                                        Class.forName(
                                            "org.sqlite.JDBC").newInstance()
                                        dbFiles = DriverManager.getConnection(
                                            "jdbc:sqlite:%s" % lclDbPath2)
                                        dbSMT = dbFiles.createStatement()
                                        dbSMT2 = dbFiles.createStatement()
                                        dbSMT3 = dbFiles.createStatement()
                                        dbSMT4 = dbFiles.createStatement()
                                    except SQLException as e:
                                        message = IngestMessage.createMessage(
                                            IngestMessage.MessageType.DATA,
                                            "Google Drive Analyzer",
                                            "Cannot open " + file.getName() +
                                            " as SQLite", "FATAL")
                                        IngestServices.getInstance(
                                        ).postMessage(message)
                                        msgcounter += 1
                                        return IngestModule.ProcessResult.ERROR
                                    resultSet4 = dbSMT2.executeQuery(
                                        "select count(filename) 'Count' from cloud_entry;"
                                    )
                                    resultSet5 = dbSMT3.executeQuery(
                                        "select child_doc_id, parent_doc_id from cloud_relations;"
                                    )
                                    if resultSet4.getInt("Count") > 0:
                                        # Create the custom attribute types
                                        # (raises harmlessly after first run).
                                        try:
                                            attribute_name = "TSK_GDRIVE_FILENAME"
                                            attribute_name2 = "TSK_GDRIVE_TIME"
                                            attribute_name3 = "TSK_GDRIVE_SIZE"
                                            attribute_name4 = "TSK_GDRIVE_SHARED"
                                            attribute_name5 = "TSK_GDRIVE_TYPE"
                                            attribute_name0 = "TSK_GDRIVE_PARENT"
                                            attID_ex0 = ccase.addArtifactAttributeType(
                                                attribute_name0,
                                                BlackboardAttribute.
                                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
                                                .STRING, "Parent Directory")
                                            attID_ex1 = ccase.addArtifactAttributeType(
                                                attribute_name,
                                                BlackboardAttribute.
                                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
                                                .STRING,
                                                "Filename or directory")
                                            attID_ex2 = ccase.addArtifactAttributeType(
                                                attribute_name2,
                                                BlackboardAttribute.
                                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
                                                .STRING, "Modification date")
                                            attID_ex3 = ccase.addArtifactAttributeType(
                                                attribute_name3,
                                                BlackboardAttribute.
                                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
                                                .STRING, "Size in KiB")
                                            attID_ex4 = ccase.addArtifactAttributeType(
                                                attribute_name4,
                                                BlackboardAttribute.
                                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
                                                .STRING,
                                                "Shared with others")
                                            attID_ex5 = ccase.addArtifactAttributeType(
                                                attribute_name5,
                                                BlackboardAttribute.
                                                TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
                                                .STRING, "Type")
                                        except:
                                            pass
                                        parent = ""
                                        mama = ""
                                        child = ""
                                        ouder = ""
                                        papa = ""
                                        child2 = ""
                                        # For every parent/child relation,
                                        # rebuild the child's full path by
                                        # walking parents up to the root.
                                        while resultSet5.next():
                                            parent = resultSet5.getString(
                                                "parent_doc_id")
                                            dbFiles = DriverManager.getConnection(
                                                "jdbc:sqlite:%s" % lclDbPath2)
                                            dbSMT6 = dbFiles.createStatement()
                                            resultSet6 = dbSMT6.executeQuery(
                                                "select filename from cloud_entry where doc_id='"
                                                + str(parent) + "';")
                                            parentfilename = resultSet6.getString(
                                                "filename")
                                            PathCheck = 0
                                            mama = ""
                                            papa = ""
                                            child = ""
                                            child = resultSet5.getString(
                                                "child_doc_id")
                                            child2 = child
                                            teller = 0
                                            while PathCheck == 0:
                                                teller += 1
                                                try:
                                                    try:
                                                        dbSMT7 = dbFiles.createStatement(
                                                        )
                                                        resultSet10 = dbSMT7.executeQuery(
                                                            "select parent_doc_id from cloud_relations where child_doc_id = '"
                                                            + str(child) +
                                                            "';")
                                                        ouder = resultSet10.getString(
                                                            "parent_doc_id")
                                                    except:
                                                        break
                                                    try:
                                                        dbSMT7 = dbFiles.createStatement(
                                                        )
                                                        resultSet11 = dbSMT7.executeQuery(
                                                            "select filename from cloud_entry where doc_id = '"
                                                            + str(ouder) +
                                                            "';")
                                                        papa = resultSet11.getString(
                                                            "filename")
                                                    except:
                                                        break
                                                    if len(papa) == 0 or str(
                                                            papa) == "root":
                                                        # Reached the top.
                                                        PathCheck = 1
                                                        mama = "[root]\\" + str(
                                                            mama)
                                                        break
                                                    else:
                                                        tijdelijk = mama
                                                        mama = str(
                                                            papa) + "\\" + str(
                                                                tijdelijk)
                                                        child = ouder
                                                except:
                                                    break
                                            # doc_ids come from the database
                                            # itself, but this is still
                                            # string-built SQL.
                                            SQLFiles = (
                                                "select f.filename, datetime(f.modified, 'unixepoch') 'Time', "
                                                "f.size/1024 'KB', f.doc_id, "
                                                "CASE f.shared WHEN 0 THEN 'No' ELSE 'Yes' END Shared , "
                                                "CASE f.doc_type WHEN 0 THEN 'Directory' ELSE 'File' END Type "
                                                "from cloud_entry f, cloud_relations c "
                                                "where c.parent_doc_id ='" +
                                                str(parent) +
                                                "' and f.doc_id='" +
                                                str(child2) +
                                                "' and f.doc_id=c.child_doc_id;"
                                            )
                                            resultSet3 = dbSMT.executeQuery(
                                                SQLFiles)
                                            while resultSet3.next():
                                                if self.context.isJobCancelled(
                                                ):
                                                    message = IngestMessage.createMessage(
                                                        IngestMessage.
                                                        MessageType.DATA,
                                                        "Canceling",
                                                        "Enumeration of files"
                                                    )
                                                    # BUG FIX: was
                                                    # 'ngestServices'
                                                    # (NameError).
                                                    IngestServices.getInstance(
                                                    ).postMessage(message)
                                                    msgcounter += 1
                                                    return IngestModule.ProcessResult.OK
                                                filename = resultSet3.getString(
                                                    "filename")
                                                time = resultSet3.getString(
                                                    "Time")
                                                Size = resultSet3.getString(
                                                    "KB")
                                                sharing = resultSet3.getString(
                                                    "Shared")
                                                filetype = resultSet3.getString(
                                                    "Type")
                                                art = gDatabase.newArtifact(
                                                    artID_Gdrive)
                                                # BUG FIX: look up the FILENAME
                                                # attribute type actually used
                                                # below (the original fetched
                                                # TSK_GDRIVE_PARENT into
                                                # attID_ex0 and then used a
                                                # possibly-undefined
                                                # attID_ex1).
                                                attID_ex1 = ccase.getAttributeType(
                                                    "TSK_GDRIVE_FILENAME")
                                                art.addAttribute(
                                                    BlackboardAttribute(
                                                        attID_ex1,
                                                        GDriveDbIngestModuleFactory
                                                        .moduleName,
                                                        str(mama + "\\" +
                                                            filename)))
                                                attID_ex2 = ccase.getAttributeType(
                                                    "TSK_GDRIVE_TIME")
                                                art.addAttribute(
                                                    BlackboardAttribute(
                                                        attID_ex2,
                                                        GDriveDbIngestModuleFactory
                                                        .moduleName, time))
                                                attID_ex3 = ccase.getAttributeType(
                                                    "TSK_GDRIVE_SIZE")
                                                art.addAttribute(
                                                    BlackboardAttribute(
                                                        attID_ex3,
                                                        GDriveDbIngestModuleFactory
                                                        .moduleName, Size))
                                                attID_ex4 = ccase.getAttributeType(
                                                    "TSK_GDRIVE_SHARED")
                                                art.addAttribute(
                                                    BlackboardAttribute(
                                                        attID_ex4,
                                                        GDriveDbIngestModuleFactory
                                                        .moduleName, sharing))
                                                attID_ex5 = ccase.getAttributeType(
                                                    "TSK_GDRIVE_TYPE")
                                                art.addAttribute(
                                                    BlackboardAttribute(
                                                        attID_ex5,
                                                        GDriveDbIngestModuleFactory
                                                        .moduleName,
                                                        filetype))
                                                IngestServices.getInstance(
                                                ).fireModuleDataEvent(
                                                    ModuleDataEvent(
                                                        GDriveDbIngestModuleFactory
                                                        .moduleName,
                                                        artID_Gdrive_evt,
                                                        None))
                                            dbSMT6.close()
                    else:
                        message = IngestMessage.createMessage(
                            IngestMessage.MessageType.DATA,
                            "Google Drive Analyzer",
                            "No accounts found in" + file.getName(),
                            "Error findings accounts")
                        IngestServices.getInstance().postMessage(message)
                        msgcounter += 1
                        return IngestModule.ProcessResult.ERROR
                except SQLException as e:
                    self.log(Level.INFO, "SQL Error: " + e.getMessage())
            except SQLException as e:
                self.log(
                    Level.INFO, "Error querying database " + file.getName() +
                    " (" + e.getMessage() + ")")
            # Clean up temp copies and handles (bare except kept: several of
            # these names are unbound when earlier branches were skipped).
            try:
                stmt.close()
                stmt2.close()
                stmt3.close()
                stmt4.close()
                stmt5.close()
                dbSMT.close()
                dbConn.close()
                os.remove(lclDbPath)
                os.remove(lclDbPath2)
            except:
                message = IngestMessage.createMessage(
                    IngestMessage.MessageType.DATA, "Google Drive Analyzer",
                    "Unable to clean up", "Error - Cleanup")
                #IngestServices.getInstance().postMessage(message)
                msgcounter += 1
        else:
            message = IngestMessage.createMessage(
                IngestMessage.MessageType.DATA, "Google Drive Analyzer",
                "Not a SQLite Database - Missing magic number",
                "Not database")
            #IngestServices.getInstance().postMessage(message)
            msgcounter += 1
            return IngestModule.ProcessResult.ERROR

    # After all databases, post a message to the ingest messages in box.
    if numFiles == 0:
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Google Drive Analyzer", "Info:",
            "Nothing to analyze ")
        #IngestServices.getInstance().postMessage(message)
        msgcounter += 1
    else:
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, "Google Drive Analyzer", "Info:",
            "Analyzed %d files" % fileCount)
        #IngestServices.getInstance().postMessage(message)
        msgcounter += 1
    return IngestModule.ProcessResult.OK
def analyze(self, dataSource, fileManager, context):
    """Extract ShareIt file-transfer history and post it as message artifacts.

    Finds every "history.db" belonging to self._PACKAGE_NAME in the data
    source, reads the joined history/item rows, and adds one message artifact
    (with the transferred file as an attachment) per row via
    CommunicationArtifactsHelper.  Each database is closed in a finally block.
    """
    historyDbs = AppSQLiteDB.findAppDatabases(dataSource, "history.db", True, self._PACKAGE_NAME)
    for historyDb in historyDbs:
        try:
            current_case = Case.getCurrentCaseThrows()
            # Helper that creates message artifacts/relationships for the ShareIt account type.
            historyDbHelper = CommunicationArtifactsHelper(
                current_case.getSleuthkitCase(), self._MODULE_NAME,
                historyDb.getDBFile(), Account.Type.SHAREIT)

            queryString = """
                        SELECT history_type, device_id, device_name, description, timestamp, file_path
                        FROM history
                        JOIN item where history.content_id = item.item_id
                    """
            historyResultSet = historyDb.runQuery(queryString)
            if historyResultSet is not None:
                while historyResultSet.next():
                    direction = ""
                    fromId = None
                    toId = None
                    fileAttachments = ArrayList()

                    # history_type == 1 marks an incoming transfer; the remote
                    # device id becomes the sender, otherwise the recipient.
                    # (Assumption based on this branch only — confirm against the app schema.)
                    if (historyResultSet.getInt("history_type") == 1):
                        direction = CommunicationDirection.INCOMING
                        fromId = historyResultSet.getString("device_id")
                    else:
                        direction = CommunicationDirection.OUTGOING
                        toId = historyResultSet.getString("device_id")

                    # Timestamps are stored in milliseconds; convert to seconds.
                    timeStamp = historyResultSet.getLong("timestamp") / 1000
                    messageArtifact = historyDbHelper.addMessage(
                        self._MESSAGE_TYPE,
                        direction,
                        fromId,
                        toId,
                        timeStamp,
                        MessageReadStatus.UNKNOWN,
                        None,   # subject
                        None,   # message text
                        None)   # thread id

                    # add the file as attachment
                    fileAttachments.add(
                        FileAttachment(
                            current_case.getSleuthkitCase(),
                            historyDb.getDBFile().getDataSource(),
                            historyResultSet.getString("file_path")))
                    messageAttachments = MessageAttachments(fileAttachments, [])
                    historyDbHelper.addAttachments(messageArtifact, messageAttachments)

        except SQLException as ex:
            self._logger.log(Level.WARNING, "Error processing query result for ShareIt history.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except TskCoreException as ex:
            self._logger.log(Level.SEVERE, "Failed to create ShareIt message artifacts.", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
        except BlackboardException as ex:
            self._logger.log(Level.WARNING, "Failed to post artifacts.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        except NoCurrentCaseException as ex:
            self._logger.log(Level.WARNING, "No case currently open.", ex)
            self._logger.log(Level.WARNING, traceback.format_exc())
        finally:
            # Always release the extracted copy of the app database.
            historyDb.close()
def find_profile(self, image_file):
    """Run Volatility's 'imageinfo' plugin on *image_file* and set self.Profile.

    Volatility is executed with SQLite output written to a temp-directory
    database; the "Suggested Profile(s)" column is read back over JDBC and the
    first suggested profile is stored in self.Profile (None when no suggestion
    is returned).  The temporary database is removed afterwards.

    Fixes applied (review):
      * the JDBC connection and logs previously used self.database_file (the
        plugin output DB set by the caller) while Volatility wrote imageinfo
        output to the local database_file — the local path is now used
        consistently so the query reads the database that was actually written;
      * 'profle_names' typo (NameError at runtime, masked by the bare except);
      * os.remove() referenced an undefined 'database_name'.
    """
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    self.log(Level.INFO, "File name to process is ==> " + str(image_file))
    file_name = os.path.basename(image_file)
    self.log(Level.INFO, "File Name ==> " + file_name)
    base_file_name = os.path.splitext(file_name)[0]
    # SQLite DB that Volatility's imageinfo output is written to (and read back from).
    database_file = os.path.join(Temp_Dir, base_file_name + ".db3")
    self.log(Level.INFO, "File Name ==> " + database_file)

    # Volatility may be a .py script (run through Python) or a standalone exe.
    if self.Python_Program:
        self.log(Level.INFO, "Running program ==> " + "Python " + self.Volatility_Executable + " -f " + image_file +
                 " " + " --output=sqlite --output-file=" + database_file + " imageinfo")
        pipe = Popen(["Python.exe", self.Volatility_Executable, "-f", image_file, "--output=sqlite",
                      "--output-file=" + database_file, "imageinfo"], stdout=PIPE, stderr=PIPE)
    else:
        self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + image_file +
                 " " + " --output=sqlite --output-file=" + database_file + " imageinfo")
        pipe = Popen([self.Volatility_Executable, "-f", image_file, "--output=sqlite",
                      "--output-file=" + database_file, "imageinfo"], stdout=PIPE, stderr=PIPE)

    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Open the DB using JDBC.
    self.log(Level.INFO, "Path the volatility database file created ==> " + database_file)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % database_file)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +
                 database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the imageinfo table for the suggested profiles.
    try:
        stmt = dbConn.createStatement()
        resultSet1 = stmt.executeQuery('Select "Suggested Profile(s)" from imageinfo')
        self.log(Level.INFO, "query SQLite Master table ==> ")
        self.log(Level.INFO, "query " + str(resultSet1))

        # Cycle through each row; keep the first profile of a comma- or
        # space-separated suggestion list.
        profile_names = None
        while resultSet1.next():
            try:
                profile_names = resultSet1.getString("Suggested Profile(s)")
                if profile_names is None:
                    self.Profile = None
                elif ',' in profile_names:
                    self.Profile = profile_names.split(",")[0]
                elif ' ' in profile_names:
                    # was 'profle_names' (NameError); split on the space that triggered this branch
                    self.Profile = profile_names.split(" ")[0]
                else:
                    self.Profile = profile_names
            except:
                self.log(Level.INFO, "Error getting profile name, Profile name is ==> " +
                         str(profile_names) + " <==")
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +
                 database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Clean up: close JDBC resources and delete the temporary imageinfo DB.
    try:
        stmt.close()
        dbConn.close()
        os.remove(database_file)  # was undefined 'database_name'
    except:
        self.log(Level.INFO, "removal of volatility imageinfo database failed " + Temp_Dir)
def process(self, dataSource, progressBar):
    """Locate autopsy.db files, geolocate stored IP attributes, and add
    TSK_GPS_TRACKPOINT artifacts.

    For each matching file: a local copy is written to the case temp dir,
    opened over JDBC, the value_text of attribute type 10 (IP addresses) is
    read, each address is looked up through the pygeoip `gi` database, results
    are appended to ModuleOutput/GeoIPResult.txt, and a trackpoint artifact
    with latitude/longitude is created.

    NOTE(review): both `gi = pygeoip.GeoIP(...)` lines are commented out, so
    Info() raises NameError on `gi` at runtime (caught by the bare except,
    which writes "Not valid ipaddress"); similarly `latitude`/`longitude` may
    be unbound when the lookup fails — confirm and restore the GeoIP setup.
    """
    # Find autopsy.db file for a particular case
    # Add Case folder as data source
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # "%utopsy.db" matches autopsy.db regardless of the first letter's case.
    files = fileManager.findFiles(dataSource, "%utopsy.db")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # This will work in 4.0.1 and beyond
    # Use blackboard class to index blackboard artifacts for keyword search
    # blackboard = Case.getCurrentCase().getServices().getBlackboard()

    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # instantiate a GeoIP class with the location of our uncompressed database 'GeoliteCity.dat'.
        # "CHANGE THE FOLLOWING PATH ACCORDING TO YOUR SYSTEM"
        #### gi = pygeoip.GeoIP(r'C:\Users\Toshiba\AppData\Roaming\autopsy\python_modules\geolocation\GeoLiteCity.dat')
        #gi = pygeoip.GeoIP(os.getcwd()+'\\'+str('GeoLiteCity.dat'))

        # Define function 'Info' to get the resultant directory
        # (closure over `gi` — see NOTE in the docstring: `gi` is never bound).
        def Info(tgt):
            rec = gi.record_by_addr(tgt)
            return rec

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".db")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the 'blackboard_attributes' table in the database and get IP Address column.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("SELECT value_text FROM blackboard_attributes WHERE attribute_type_id = '10';")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Create / open 'GeoIPResult.txt' that will contain the entire inforamtion
        # regarding a particular IP address
        # NOTE(review): opened with 'w' per matched file, so a later file
        # overwrites the results of an earlier one — confirm intended.
        outPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "ModuleOutput", "GeoIPResult.txt")
        outFile = open(outPath, 'w')

        # Cycle through each IP address in Ip address column.
        while resultSet.next():
            try:
                IP_ADD = resultSet.getString("value_text")
                try:
                    INFO = Info(IP_ADD)
                    latitude = INFO.get('latitude')
                    longitude = INFO.get('longitude')
                    outFile.write(IP_ADD + ' ---------->> ' + str(INFO) + '\n')
                except:
                    outFile.write("Not valid ipaddress" + '\n')
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from table (" + e.getMessage() + ")")

            # Make an artifact on the blackboard,TSK_GPS_TRACKPOINT and give it attributes longitude and latitude.
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(),
                             GeoIPlocationFinderIngestModuleFactory.moduleName, longitude))
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(),
                             GeoIPlocationFinderIngestModuleFactory.moduleName, latitude))

        # Close .txt file and Db connection
        outFile.close()
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Create artifacts from app databases using SQL recipes in Alexa_DB.db3.

    The module-local Alexa_DB.db3 maps file names to (artifact_name,
    artifact_description, sql_to_run).  For each distinct file name the data
    source is searched, each hit is extracted to the temp directory, the
    configured SQL is run against it, and one artifact per result row is
    created with TSK_ALEXA_<COLUMN> string attributes derived from the result
    set's column labels.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    self.log(Level.INFO, "Starting 2 to process, Just before call to parse_safari_history")

    skCase = Case.getCurrentCase().getSleuthkitCase()
    # The recipe database ships next to this module file.
    head, tail = os.path.split(os.path.abspath(__file__))
    settings_db = os.path.join(head, "Alexa_DB.db3")

    #Start to process based on version of OS
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) Alexa_DB.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the database table for unique file names
    try:
        stmt = dbConn.createStatement()
        process_data_sql = "Select distinct file_name from alexa_databases"
        self.log(Level.INFO, process_data_sql)
        resultSet = stmt.executeQuery(process_data_sql)
        self.log(Level.INFO, "Query Database table for unique file names")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for unique file names")
        return IngestModule.ProcessResult.OK

    # Process all the artifacts based on version of the OS
    while resultSet.next():
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, resultSet.getString("file_name"))
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files for file_name ==> " + resultSet.getString("file_name"))
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0
        for file in files:
            # Open the DB using JDBC
            #lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), SQLite_DB)
            # File id is appended to reduce name collisions between hits.
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                     file.getName() + "-" + str(file.getId()))
            ContentUtils.writeToFile(file, File(lclDbPath))
            #self.log(Level.INFO, "Path the prefetch database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn_x = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
                self.log(Level.INFO, "Database ==> " + file.getName())
            except SQLException as e:
                # NOTE(review): processing continues even when the open failed,
                # so dbConn_x may be stale/unbound below — confirm intent.
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() +
                         "-" + str(file.getId()) + " (" + e.getMessage() + ")")
                #return IngestModule.ProcessResult.OK

            # Fetch the SQL recipes configured for this file name.
            try:
                stmt_sql = dbConn.createStatement()
                process_stmt_sql = "select artifact_name, artifact_description, sql_to_run from alexa_databases where file_name = '" + \
                                   resultSet.getString("file_name") + "';"
                self.log(Level.INFO, process_stmt_sql)
                resultSet_sql = stmt_sql.executeQuery(process_stmt_sql)
                self.log(Level.INFO, "Query Database table for sql statements")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for sql_statements for file " +
                         resultSet.getString("file_name"))
                # return IngestModule.ProcessResult.OK

            # Process all the artifacts based on version of the OS
            while resultSet_sql.next():
                # Run the recipe SQL against the extracted app database.
                try:
                    stmt_1 = dbConn_x.createStatement()
                    sql_to_run = resultSet_sql.getString("sql_to_run")
                    self.log(Level.INFO, sql_to_run)
                    resultSet_3 = stmt_1.executeQuery(sql_to_run)
                    self.log(Level.INFO, "query " + sql_to_run)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for " + resultSet.getString("file_name"))
                    continue
                    # return IngestModule.ProcessResult.OK

                # Register the custom artifact type; the except fires when it
                # already exists from a previous run.
                try:
                    #self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_sql = skCase.addArtifactType(resultSet_sql.getString("artifact_name"),
                                                       resultSet_sql.getString("artifact_description"))
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, for artifact. ==> " +
                             resultSet_sql.getString("artifact_name"))

                artID_hst = skCase.getArtifactTypeID(resultSet_sql.getString("artifact_name"))
                artID_hst_evt = skCase.getArtifactType(resultSet_sql.getString("artifact_name"))

                # Attribute types are derived from the result set's column labels.
                meta = resultSet_3.getMetaData()
                columncount = meta.getColumnCount()
                column_names = []
                self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
                for x in range(1, columncount + 1):
                    self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                    try:
                        attID_ex1 = skCase.addArtifactAttributeType(
                            "TSK_ALEXA_" + meta.getColumnLabel(x).upper(),
                            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                            meta.getColumnLabel(x))
                    except:
                        # Attribute type already exists (expected on re-runs).
                        self.log(Level.INFO, "Attributes Creation Error, " + "TSK_ALEXA_" +
                                 meta.getColumnLabel(x) + " ==> ")
                    column_names.append(meta.getColumnLabel(x))

                self.log(Level.INFO, "All Columns ==> " + str(column_names))
                # Cycle through each row and create artifacts
                while resultSet_3.next():
                    try:
                        #self.log(Level.INFO, SQL_String_1)
                        self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                        art = file.newArtifact(artID_hst)
                        self.log(Level.INFO, "Inserting attribute URL")
                        for col_name in column_names:
                            attID_ex1 = skCase.getAttributeType("TSK_ALEXA_" + col_name.upper())
                            self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                            self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                            # Dispatch on the declared attribute value type.
                            if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                                try:
                                    art.addAttribute(BlackboardAttribute(
                                        attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName,
                                        resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                                try:
                                    art.addAttribute(BlackboardAttribute(
                                        attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName,
                                        resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                                # NOTE(review): uses getInt for a LONG attribute — confirm
                                # getLong was intended for 64-bit values.
                                try:
                                    art.addAttribute(BlackboardAttribute(
                                        attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName,
                                        resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                                # NOTE(review): uses getInt for a DOUBLE attribute — confirm
                                # getDouble was intended.
                                try:
                                    art.addAttribute(BlackboardAttribute(
                                        attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName,
                                        resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                                try:
                                    art.addAttribute(BlackboardAttribute(
                                        attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName,
                                        resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                            else:
                                # NOTE(review): JDBC ResultSet has no getReal — this branch
                                # always raises and is swallowed by the except below; verify.
                                try:
                                    art.addAttribute(BlackboardAttribute(
                                        attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName,
                                        resultSet_3.getReal(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from sql statement ==> " +
                                 resultSet_sql.getString("artifact_name"))

                # Notify listeners that new artifacts of this type exist.
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(Alexa_DB_ParseIngestModuleFactory.moduleName, artID_hst_evt, None))
                stmt_1.close()

            stmt_sql.close()
            dbConn_x.close()

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Mac OS Recent Artifacts",
                                          " Mac OS Recents Artifacts Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def __findContactsInDB(self, databasePath, abstractFile, dataSource):
    """Parse an Android contacts database and create TSK_CONTACT artifacts.

    Opens *databasePath* over JDBC, queries phone/email rows (adapting the
    query to whether contacts.name_raw_contact_id exists in this schema
    version), creates one contact artifact per distinct display name, creates
    a PHONE/EMAIL account per row, and links each to a 'Device' account.
    Artifacts are indexed for keyword search and announced in the finally
    block; JDBC resources are closed there as well.
    """
    if not databasePath:
        return

    bbartifacts = list()
    try:
        Class.forName("org.sqlite.JDBC")  # load JDBC driver
        connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
        statement = connection.createStatement()
    except (ClassNotFoundException, SQLException) as ex:
        self._logger.log(Level.SEVERE, "Error opening database", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
        return

    # Create a 'Device' account using the data source device id
    datasourceObjId = dataSource.getDataSource().getId()
    ds = Case.getCurrentCase().getSleuthkitCase().getDataSource(datasourceObjId)
    deviceID = ds.getDeviceId()
    deviceAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, deviceID, general.MODULE_NAME, abstractFile)

    try:
        # get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
        # sorted by name, so phonenumber/email would be consecutive for a person if they exist.
        # check if contacts.name_raw_contact_id exists. Modify the query accordingly.
        columnFound = False
        metadata = connection.getMetaData()
        columnListResultSet = metadata.getColumns(None, None, "contacts", None)
        while columnListResultSet.next():
            if columnListResultSet.getString("COLUMN_NAME") == "name_raw_contact_id":
                columnFound = True
                break

        if columnFound:
            # Newer schema: the display name lives on the name raw contact.
            resultSet = statement.executeQuery(
                "SELECT mimetype, data1, name_raw_contact.display_name AS display_name \n"
                + "FROM raw_contacts JOIN contacts ON (raw_contacts.contact_id=contacts._id) \n"
                + "JOIN raw_contacts AS name_raw_contact ON(name_raw_contact_id=name_raw_contact._id) "
                + "LEFT OUTER JOIN data ON (data.raw_contact_id=raw_contacts._id) \n"
                + "LEFT OUTER JOIN mimetypes ON (data.mimetype_id=mimetypes._id) \n"
                + "WHERE mimetype = 'vnd.android.cursor.item/phone_v2' OR mimetype = 'vnd.android.cursor.item/email_v2'\n"
                + "ORDER BY name_raw_contact.display_name ASC;")
        else:
            # Older schema: display name is on raw_contacts directly.
            resultSet = statement.executeQuery(
                "SELECT mimetype, data1, raw_contacts.display_name AS display_name \n"
                + "FROM raw_contacts JOIN contacts ON (raw_contacts.contact_id=contacts._id) \n"
                + "LEFT OUTER JOIN data ON (data.raw_contact_id=raw_contacts._id) \n"
                + "LEFT OUTER JOIN mimetypes ON (data.mimetype_id=mimetypes._id) \n"
                + "WHERE mimetype = 'vnd.android.cursor.item/phone_v2' OR mimetype = 'vnd.android.cursor.item/email_v2'\n"
                + "ORDER BY raw_contacts.display_name ASC;")

        attributes = ArrayList()
        artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
        oldName = ""
        while resultSet.next():
            name = resultSet.getString("display_name")
            data1 = resultSet.getString("data1")  # the phone number or email
            mimetype = resultSet.getString("mimetype")  # either phone or email
            # Rows are ordered by name, so a name change starts a new contact.
            if name != oldName:
                artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, general.MODULE_NAME, name))
            if mimetype == "vnd.android.cursor.item/phone_v2":
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, general.MODULE_NAME, data1))
                acctType = Account.Type.PHONE
            else:
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL, general.MODULE_NAME, data1))
                acctType = Account.Type.EMAIL

            # NOTE(review): `attributes` is never cleared, so every artifact
            # receives all attributes accumulated so far — verify intended.
            artifact.addAttributes(attributes)

            # Create an account instance
            contactAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(acctType, data1, general.MODULE_NAME, abstractFile);

            # create relationship between accounts
            Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().addRelationships(deviceAccountInstance, [contactAccountInstance], artifact, Relationship.Type.CONTACT, 0);

            oldName = name
            bbartifacts.append(artifact)
            try:
                # index the artifact for keyword search
                blackboard = Case.getCurrentCase().getServices().getBlackboard()
                blackboard.indexArtifact(artifact)
            except Blackboard.BlackboardException as ex:
                self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
                MessageNotifyUtil.Notify.error("Failed to index contact artifact for keyword search.", artifact.getDisplayName())

    except SQLException as ex:
        self._logger.log(Level.WARNING, "Unable to execute contacts SQL query against {0} : {1}", [databasePath, ex])
    except TskCoreException as ex:
        self._logger.log(Level.SEVERE, "Error posting to blackboard", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    finally:
        # Announce new contact artifacts (if any) and release JDBC resources.
        if bbartifacts:
            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(general.MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, bbartifacts))
        try:
            if resultSet is not None:
                resultSet.close()
            statement.close()
            connection.close()
        except Exception as ex:
            self._logger.log(Level.SEVERE, "Error closing database", ex)
            self._logger.log(Level.SEVERE, traceback.format_exc())
def process(self, dataSource, progressBar):
    """Run configured Volatility plugins on each local file of the data
    source and load the SQLite output as custom artifacts.

    For each file with a local path: optionally auto-detect the profile via
    self.find_profile, then for each plugin in self.Plugins run Volatility
    with --output=sqlite, open the output DB over JDBC, and for every
    not-yet-loaded table create a TSK_VOL_<table>_<file> artifact type plus
    one attribute per column, emitting one artifact per table row.  Loaded
    tables are tracked in a plugins_loaded_to_Autopsy bookkeeping table so
    re-runs skip them.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the temp directory and create the sub directory
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    temp_dir = os.path.join(Temp_Dir, "Volatility")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Plaso Import Directory already exists " + Temp_Dir)

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%", "/")
    numFiles = len(files)
    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

    #file_name = os.path.basename(self.path_to_storage_file)
    #self.log(Level.INFO, "File Name ==> " + file_name)
    #base_file_name = os.path.splitext(file_name)[0]
    #self.database_file = Temp_Dir + "\\volatility\\Plaso.db3"
    for file in files:
        self.log(Level.INFO, "File name to process is ==> " + str(file))
        self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath()))
        image_file = file.getLocalAbsPath()
        # Only files with a local path can be fed to Volatility.
        if image_file != None:
            self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath()))
            file_name = os.path.basename(file.getLocalAbsPath())
            self.log(Level.INFO, "File Name ==> " + file_name)
            base_file_name = os.path.splitext(file_name)[0]
            # One output DB per memory image, under ModulesOutput/Volatility.
            self.database_file = os.path.join(temp_dir, base_file_name + ".db3")
            self.log(Level.INFO, "File Name ==> " + self.database_file)
            if self.isAutodetect:
                self.find_profile(image_file)
            # No usable profile means Volatility cannot run for this image.
            if self.Profile == None:
                continue
            for plugin_to_run in self.Plugins:
                # Volatility may be a .py script (run through Python) or an executable.
                if self.Python_Program:
                    self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + file.getLocalAbsPath() + " " +
                             "--profile=" + self.Profile + " --output=sqlite --output-file=" + self.database_file + " " +
                             self.Additional_Parms + " " + plugin_to_run)
                    if PlatformUtil.isWindowsOS():
                        pipe = Popen(["Python.exe", self.Volatility_Executable, "-f", file.getLocalAbsPath(),
                                      "--profile=" + self.Profile, "--output=sqlite",
                                      "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run],
                                     stdout=PIPE, stderr=PIPE)
                    else:
                        pipe = Popen(["python", self.Volatility_Executable, "-f", file.getLocalAbsPath(),
                                      "--profile=" + self.Profile, "--output=sqlite",
                                      "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run],
                                     stdout=PIPE, stderr=PIPE)
                else:
                    self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + file.getLocalAbsPath() + " " +
                             "--profile=" + self.Profile + " --output=sqlite --output-file=" + self.database_file + " " +
                             self.Additional_Parms + " " + plugin_to_run)
                    pipe = Popen([self.Volatility_Executable, "-f", file.getLocalAbsPath(),
                                  "--profile=" + self.Profile, "--output=sqlite",
                                  "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run],
                                 stdout=PIPE, stderr=PIPE)

                out_text = pipe.communicate()[0]
                self.log(Level.INFO, "Output from run is ==> " + out_text)

                # Open the DB using JDBC
                self.log(Level.INFO, "Path the volatility database file created ==> " + self.database_file)
                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % self.database_file)
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " + self.database_file + " (" + e.getMessage() + ")")

                # Bookkeeping table of already-imported plugin tables; the
                # except fires when it already exists (expected on re-runs).
                try:
                    exestmt = dbConn.createStatement()
                    resultx = exestmt.execute('create table plugins_loaded_to_Autopsy (table_name text);')
                except SQLException as e:
                    self.log(Level.INFO, "Could not create table plugins_loaded_to_autopsy")

                # Query the database
                try:
                    stmt = dbConn.createStatement()
                    stmt2 = dbConn.createStatement()
                    stmt3 = dbConn.createStatement()
                    stmt4 = dbConn.createStatement()
                    # All tables not yet recorded in the bookkeeping table.
                    resultSet1 = stmt.executeQuery("Select upper(tbl_name) tbl_name from SQLITE_MASTER where upper(tbl_name) " \
                                                   " not in (select table_name from plugins_loaded_to_Autopsy)" \
                                                   " and upper(tbl_name) <> 'PLUGINS_LOADED_TO_AUTOPSY';")

                    # Cycle through each row and create artifacts
                    while resultSet1.next():
                        try:
                            self.log(Level.INFO, "Begin Create New Artifacts ==> " + resultSet1.getString("tbl_name"))
                            artID_art = skCase.addArtifactType("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name,
                                                               "Volatility " + resultSet1.getString("tbl_name") + " " + file_name)
                        except:
                            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

                        # Get the artifact and attributes
                        artID_art = skCase.getArtifactTypeID("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name)
                        artID_art_evt = skCase.getArtifactType("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name)
                        try:
                            self.log(Level.INFO, "Result (" + resultSet1.getString("tbl_name") + ")")
                            table_name = resultSet1.getString("tbl_name")
                            resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet1.getString("tbl_name") + " ")
                            row_count = resultSet4.getInt("NumRows")
                            self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ")
                            if row_count >= 1:
                                SQL_String_1 = "Select * from " + table_name + ";"
                                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                                self.log(Level.INFO, SQL_String_1)
                                self.log(Level.INFO, SQL_String_2)
                                artifact_name = "TSK_VOL_" + table_name.upper() + "_" + file_name
                                artID_sql = skCase.getArtifactTypeID(artifact_name)
                                artID_sql_evt = skCase.getArtifactType(artifact_name)

                                # Map each column to a TSK_VOL_<table>_<col> attribute type,
                                # STRING for textual types and LONG for numeric ones.
                                Column_Names = []
                                Column_Types = []
                                resultSet2 = stmt2.executeQuery(SQL_String_2)
                                while resultSet2.next():
                                    Column_Names.append(resultSet2.getString("name").upper())
                                    Column_Types.append(resultSet2.getString("type").upper())
                                    attribute_name = "TSK_VOL_" + table_name + "_" + resultSet2.getString("name").upper()
                                    if resultSet2.getString("type").upper() == "TEXT":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name,
                                                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "LONGVARCHAR":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name,
                                                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name,
                                                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "BLOB":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name,
                                                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "REAL":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name,
                                                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    else:
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name,
                                                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")

                                # One artifact per data row; columns are read by 1-based index.
                                resultSet3 = stmt3.executeQuery(SQL_String_1)
                                while resultSet3.next():
                                    art = file.newArtifact(artID_sql)
                                    Column_Number = 1
                                    for col_name in Column_Names:
                                        c_name = "TSK_VOL_" + table_name.upper() + "_" + Column_Names[Column_Number - 1]
                                        attID_ex1 = skCase.getAttributeType(c_name)
                                        if Column_Types[Column_Number - 1] == "TEXT":
                                            # NULL text is stored as a single space placeholder.
                                            if resultSet3.getString(Column_Number) == None:
                                                art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, " "))
                                            else:
                                                art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                        elif Column_Types[Column_Number - 1] == "":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                        elif Column_Types[Column_Number - 1] == "LONGVARCHAR":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                        elif Column_Types[Column_Number - 1] == "BLOB":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                        elif Column_Types[Column_Number - 1] == "REAL":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number))))
                                        else:
                                            # NOTE(review): long(getString(...)) raises on non-numeric
                                            # text and is caught by the outer SQLException handler
                                            # only for SQL errors — verify getLong was intended.
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, long(resultSet3.getString(Column_Number))))
                                        Column_Number = Column_Number + 1

                                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(VolatilityIngestModuleFactory.moduleName, \
                                                                                                 artID_sql_evt, None))
                        except SQLException as e:
                            # NOTE(review): 'resultSet' is undefined here (the loop variable is
                            # resultSet1) — this handler would raise NameError if reached; verify.
                            self.log(Level.INFO, "Error getting values from table " + resultSet.getString("tbl_name") + " (" + e.getMessage() + ")")

                        # Record the table as imported so the next run skips it.
                        try:
                            # exestmt = createStatement()
                            resultx = exestmt.execute("insert into plugins_loaded_to_Autopsy values ('" + table_name + "');")
                        except SQLException as e:
                            self.log(Level.INFO, "Could not create table plugins_loaded_to_autopsy")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")")

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "VolatilitySettings", " VolatilitySettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Convert the user-selected Plaso storage file into a 4n6time SQLite
    database with psort, register that database with the case as a derived
    file, and post one blackboard artifact per log2timeline row, using one
    custom artifact type per distinct Plaso source.

    Returns IngestModule.ProcessResult.OK in every path; database errors
    are logged and abort processing early rather than failing the ingest job.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # "." against "/" matches files in the data source root; one of these
    # becomes the parent of the derived database file below.
    files = fileManager.findFiles(dataSource, ".", "/")
    numFiles = len(files)

    # Create the Plaso_Import output directory under the module output
    # directory; if it already exists, reuse it.
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    plaso_db_dir = os.path.join(Temp_Dir, "Plaso_Import")
    try:
        os.mkdir(plaso_db_dir)
    except:
        self.log(Level.INFO, "Plaso Import Directory already exists " + Temp_Dir)

    # Run psort against the storage file to convert it from its native
    # format to a 4n6time SQLite database.
    self.database_file = os.path.join(plaso_db_dir, "Plaso_Import.db3")
    # Log the exact command line that is actually executed.
    self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " -o 4n6time_sqlite -w " +
             self.database_file + " " + self.path_to_storage_file)
    pipe = Popen([self.path_to_exe, "-o", "4n6time_sqlite", "-w", self.database_file,
                  self.path_to_storage_file], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    plaso_db_file = self.database_file
    self.log(Level.INFO, "Plaso DB File ==> " + plaso_db_file)

    # Fetch an AbstractFile to act as the parent of the derived file
    # (after the loop, abstract_file_info holds the last match).
    for file in files:
        abstract_file_info = skCase.getAbstractFileById(file.getId())

    # Add the generated database to the case as a derived file so it shows
    # up in the tree and can own the artifacts created below.
    file = skCase.addDerivedFile("Plaso_Import.db3", plaso_db_file, os.path.getsize(plaso_db_file),
                                 0, 0, 0, 0, True, abstract_file_info, "", "", "", "",
                                 TskData.EncodingType.NONE)
    self.log(Level.INFO, "Derived File ==> " + str(file))

    # Create the custom Plaso attribute types. addArtifactAttributeType
    # raises if the type already exists (e.g. on a re-run), which is fine.
    for attribute_name, attribute_label in (
            ("TSK_PLASO_SOURCE", "Plaso Source"),
            ("TSK_PLASO_SOURCE_TYPE", "Plaso Source Type"),
            ("TSK_PLASO_TYPE", "Plaso Type"),
            ("TSK_PLASO_DESCRIPTION", "Plaso Description"),
            ("TSK_PLASO_FILENAME", "Plaso File Name"),
            ("TSK_PLASO_FORMAT", "Plaso Format"),
            ("TSK_PLASO_EXTRA", "Plaso Extra"),
            ("TSK_PLASO_VSS_STORE_NUM", "Plaso VSS Store Num")):
        try:
            skCase.addArtifactAttributeType(
                attribute_name,
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                attribute_label)
        except:
            self.log(Level.INFO, "Attributes Creation Error, " + attribute_label + " ==> ")

    # Look the attribute types back up for use when posting artifacts.
    attID_source = skCase.getAttributeType("TSK_PLASO_SOURCE")
    attID_sourcetype = skCase.getAttributeType("TSK_PLASO_SOURCE_TYPE")
    attID_type = skCase.getAttributeType("TSK_PLASO_TYPE")
    attID_desc = skCase.getAttributeType("TSK_PLASO_DESCRIPTION")
    self.log(Level.INFO, "Description Attribute ==> " + str(attID_desc))
    attID_filename = skCase.getAttributeType("TSK_PLASO_FILENAME")
    attID_format = skCase.getAttributeType("TSK_PLASO_FORMAT")
    attID_extra = skCase.getAttributeType("TSK_PLASO_EXTRA")
    attID_vss_num = skCase.getAttributeType("TSK_PLASO_VSS_STORE_NUM")

    # Open the psort-generated database via the SQLite JDBC driver.
    lclDbPath = os.path.join(plaso_db_dir, "Plaso_Import.db3")
    self.log(Level.INFO, "Path the Plaso Import file database file created ==> " + lclDbPath)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) plaso_Import.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the distinct sources, optionally excluding FILE entries per the
    # user's module settings.
    try:
        stmt = dbConn.createStatement()
        l2t_sources_sql = "select sources from l2t_sources"
        if self.exclude_file_sources:
            l2t_sources_sql = l2t_sources_sql + " where sources != 'FILE'"
        self.log(Level.INFO, l2t_sources_sql)
        resultSet = stmt.executeQuery(l2t_sources_sql)
        self.log(Level.INFO, "query l2t_sources table")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for l2t_sources table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # One artifact type per Plaso source; one artifact per log2timeline row.
    while resultSet.next():
        try:
            self.log(Level.INFO, "Begin Create New Artifacts ==> " + resultSet.getString("sources"))
            # Raises if the artifact type already exists; re-fetched below.
            skCase.addArtifactType("TSK_PLASO" + resultSet.getString("sources"),
                                   "Plaso Source " + resultSet.getString("sources"))
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

        artID_art = skCase.getArtifactTypeID("TSK_PLASO" + resultSet.getString("sources"))
        artID_art_evt = skCase.getArtifactType("TSK_PLASO" + resultSet.getString("sources"))

        try:
            # Use a second Statement: per JDBC a Statement supports only one
            # open ResultSet, and resultSet (from stmt) is still being read.
            stmt2 = dbConn.createStatement()
            # NOTE(review): the source value is interpolated into the SQL;
            # it comes from this same psort-generated database, but a
            # PreparedStatement would be safer.
            log2timeline_sql = ("select source, sourcetype, type, description, filename, format, extra, " +
                                " strftime('%s',datetime) 'datetime', vss_store_number, url " +
                                " from log2timeline where source = '" + resultSet.getString("sources") + "';")
            self.log(Level.INFO, log2timeline_sql)
            resultSet2 = stmt2.executeQuery(log2timeline_sql)
            self.log(Level.INFO, "query lastsession table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for log2timeline table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        while resultSet2.next():
            try:
                art = file.newArtifact(artID_art)
                # BUG FIX: the "source" value was previously added with the
                # TSK_PLASO_SOURCE_TYPE attribute type; it belongs under
                # TSK_PLASO_SOURCE.
                art.addAttribute(BlackboardAttribute(attID_source,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("source")))
                art.addAttribute(BlackboardAttribute(attID_sourcetype,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("sourcetype")))
                art.addAttribute(BlackboardAttribute(attID_type,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("type")))
                art.addAttribute(BlackboardAttribute(attID_desc,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("description")))
                art.addAttribute(BlackboardAttribute(attID_filename,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("filename")))
                art.addAttribute(BlackboardAttribute(attID_format,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("format")))
                art.addAttribute(BlackboardAttribute(attID_extra,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("extra")))
                # datetime is selected as a Unix epoch via strftime('%s', ...).
                art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(),
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getInt("datetime")))
                art.addAttribute(BlackboardAttribute(attID_vss_num,
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("vss_store_number")))
                art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
                                                     Plaso_ImportIngestModuleFactory.moduleName,
                                                     resultSet2.getString("url")))
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from the Log2timeline table (" + e.getMessage() + ")")

        # Tell listeners (e.g. the tree viewer) new artifacts of this type exist.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(Plaso_ImportIngestModuleFactory.moduleName, artID_art_evt, None))

    # After all sources are processed, post a message to the ingest inbox.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Plaso_ImportSettings",
        " Plaso_ImportSettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK