def process(self, dataSource, progressBar):
    """Export a name/MD5 hashset file for this data source.

    Queries tsk_files for every row with a computed MD5 and writes
    "<md5>\t<name>" lines to <ExportDirectory>/<image>_hashset.txt,
    then posts an ingest inbox message.

    Always returns IngestModule.ProcessResult.OK.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    imageFiles = dataSource.getPaths()
    # Guard: a data source with no backing image paths cannot be named
    # (previously imageFiles[0] raised IndexError).
    if not imageFiles:
        return IngestModule.ProcessResult.OK
    imageFile = os.path.basename(imageFiles[0])
    exportFile = os.path.join(Case.getCurrentCase().getExportDirectory(),
                              str(imageFile) + "_hashset.txt")

    # Only rows that actually have an MD5 computed.
    sql_statement = 'select name, md5 from tsk_files where md5 <> "";'
    skCase = Case.getCurrentCase().getSleuthkitCase()
    dbquery = skCase.executeQuery(sql_statement)
    try:
        resultSet = dbquery.getResultSet()
        with open(exportFile, 'w') as f:
            while resultSet.next():
                f.write(resultSet.getString("md5") + "\t" +
                        resultSet.getString("name") + "\n")
    finally:
        # Always release the case-database query handle, even if writing fails.
        dbquery.close()

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Create_DS_Hashset",
        " Hashset Create For Datasource " + imageFile)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, file):
    """Flag .txt files as interesting, then demonstrate artifact/content reads.

    Always returns IngestModule.ProcessResult.OK.
    """
    # Skip non-files (unallocated/unused block pseudo-files, directories).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() == False)):
        return IngestModule.ProcessResult.OK

    # Use blackboard class to index blackboard artifacts for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For an example, we will flag files with .txt in the name and make a
    # blackboard artifact.
    if file.getName().lower().endswith(".txt"):
        self.log(Level.INFO, "Found a text file: " + file.getName())
        self.filesFound += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a
        # generic type of artifact. Refer to the developer docs for others.
        art = file.newArtifact(
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
            SampleJythonFileIngestModuleFactory.moduleName, "Text Files")
        art.addAttribute(att)

        try:
            # Index the artifact for keyword search.
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE,
                     "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others of the new artifact.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(SampleJythonFileIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))

    # Example only: enumerate interesting-file artifacts added by any module
    # and log their attributes.
    artifactList = file.getArtifacts(
        BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
    for artifact in artifactList:
        attributeList = artifact.getAttributes()
        for attrib in attributeList:
            self.log(Level.INFO, attrib.toString())

    # Example only: read the file's contents and count the bytes.
    # BUGFIX: the read length was previously bound to the name 'len',
    # shadowing the builtin; renamed to readLen.
    inputStream = ReadContentInputStream(file)
    buffer = jarray.zeros(1024, "b")
    totLen = 0
    readLen = inputStream.read(buffer)
    while readLen != -1:
        totLen = totLen + readLen
        readLen = inputStream.read(buffer)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Count and flag every file whose name contains "test"."""
    # Bail out immediately if the job was cancelled before we started.
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Amount of work is unknown until the file query returns.
    progressBar.switchToIndeterminate()

    currentCase = Case.getCurrentCase()
    caseDb = currentCase.getSleuthkitCase()
    fileManager = Services(caseDb).getFileManager()

    # FileManager gives us every file with "test" in its name.
    matches = fileManager.findFiles(dataSource, "%test%")
    numFiles = len(matches)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)

    fileCount = 0
    for match in matches:
        # Honour a cancel request issued mid-run.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "Processing file: " + match.getName())
        fileCount += 1

        # Tag the file with a generic interesting-file artifact.
        hit = match.newArtifact(
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        hit.addAttribute(BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            SampleJythonDataSourceIngestModuleFactory.moduleName,
            "Test file"))

        # Demonstration: stream the file and tally how many bytes it holds.
        stream = ReadContentInputStream(match)
        chunk = jarray.zeros(1024, "b")
        totLen = 0
        readLen = stream.read(chunk)
        while readLen != -1:
            totLen += readLen
            readLen = stream.read(chunk)

        progressBar.progress(fileCount)

    # Tell the user how many files were flagged via the ingest inbox.
    note = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module",
        "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(note)
    return IngestModule.ProcessResult.OK
def process(self, file):
    """Flag files whose path has NO row in the external META sqlite database.

    Looks the file's full path up in the META table of the sqlite database
    named by the module-level `filename`; files without a matching row get a
    TSK_INTERESTING_FILE_HIT ("Unknown Meta") artifact.

    Always returns IngestModule.ProcessResult.OK; lookup failures are
    logged-free best-effort (swallowed), as in the original.
    """
    dbConn = None
    stmt = None
    try:
        # NOTE(review): `filename` is not defined in this method — it is
        # presumably a module-level setting; confirm it is set before ingest.
        if filename:
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % filename)
            path = file.getParentPath() + file.getName()
            # Parameterized query: file names are untrusted input, and the
            # previous string-formatted SQL broke on quotes in the path.
            stmt = dbConn.prepareStatement("SELECT * FROM META WHERE Path == ?")
            stmt.setString(1, path)
            resultSet = stmt.executeQuery()
            if resultSet.next():
                temp = "Future Improvement"  # placeholder: path is known
            else:
                # Unknown path: post a generic interesting-file hit.
                art = file.newArtifact(
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
                att = BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                    MatchMetaInfoIngestModuleFactory.moduleName,
                    "Unknown Meta")
                art.addAttribute(att)
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(MatchMetaInfoIngestModuleFactory.moduleName,
                                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                                    None))
    except Exception:
        # Best-effort: a lookup failure must not abort the ingest pipeline.
        pass
    finally:
        # BUGFIX: JDBC resources previously leaked whenever the query threw;
        # always release them, tolerating failures on close itself.
        for resource in (stmt, dbConn):
            if resource is not None:
                try:
                    resource.close()
                except Exception:
                    pass
    return IngestModule.ProcessResult.OK
def process(self, file):
    """Flag allocated files larger than 10 MB whose size is an exact multiple of 4096."""
    # Blackboard handle, used later to index any artifact we create.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Ignore anything that is not a regular file.
    nonFileTypes = (TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS,
                    TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
    if file.getType() in nonFileTypes or not file.isFile():
        return IngestModule.ProcessResult.OK

    size = file.getSize()
    if size > 10485760 and size % 4096 == 0:
        # Generic interesting-file hit; see developer docs for other types.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            FindBigRoundFilesIngestModuleFactory.moduleName,
            "Big and Round Files")
        art.addAttribute(att)
        try:
            # Make the artifact findable via keyword search.
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())
        # Let the UI (and other listeners) know a new artifact exists.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))
    return IngestModule.ProcessResult.OK
def add_Volatility_Dump_file(self, dataSource, dir_abstract_file_info, dump_dir, local_dir, pid_name):
    """Register every file in dump_dir as a derived file under dir_abstract_file_info.

    Walks only the top level of dump_dir, skips files already known as
    derived files, and fires a module-content event for each file added.
    `pid_name` is currently unused here.
    """
    self.log(Level.INFO, "Adding Files from Dump Directory")
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)
    self.log(Level.INFO, "Parent Path is ==> " + str(dir_abstract_file_info))

    # Renamed from 'skCase': this is the FileManager, not a SleuthkitCase.
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Only the files in the top level of dump_dir (no recursion).
    files = next(os.walk(dump_dir))[2]
    for file in files:
        self.log(Level.INFO, " File Name is ==> " + file)
        dev_file = os.path.join(dump_dir, file)
        local_file = os.path.join(local_dir, file)
        self.log(Level.INFO, " Dev File Name is ==> " + dev_file)
        self.log(Level.INFO, " Local File Name is ==> " + local_file)
        if not self.check_derived_existance(dataSource, file,
                                            dir_abstract_file_info.parentPath):
            # Parameters are: file name, local path, size, ctime, crtime,
            # atime, mtime, isFile, parent file, rederive details, tool name,
            # tool version, other details, encoding type.
            # BUGFIX: removed a stray unary '+' left over from a broken line
            # continuation before the timestamp arguments.
            derived_file = fileManager.addDerivedFile(
                file, local_file, os.path.getsize(dev_file),
                0, 0, 0, 0, True, dir_abstract_file_info,
                "", "Volatility", self.Volatility_Version, "",
                TskData.EncodingType.NONE)
            IngestServices.getInstance().fireModuleContentEvent(
                ModuleContentEvent(derived_file))
        else:
            # Already registered from a previous run; nothing to do.
            pass
def add_Volatility_Dump_file(self, dataSource, dir_abstract_file_info, dump_dir, local_dir, pid_name):
    """Add each file found in dump_dir to the case as a derived file."""
    self.log(Level.INFO, "Adding Files from Dump Directory")
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)
    self.log(Level.INFO, "Parent Path is ==> " + str(dir_abstract_file_info))

    file_manager = Case.getCurrentCase().getServices().getFileManager()

    # Top-level directory listing only: files element of os.walk's first hit.
    _, _, dump_files = next(os.walk(dump_dir))
    for name in dump_files:
        self.log(Level.INFO, " File Name is ==> " + name)
        source_path = os.path.join(dump_dir, name)
        case_local_path = os.path.join(local_dir, name)
        self.log(Level.INFO, " Dev File Name is ==> " + source_path)
        self.log(Level.INFO, " Local File Name is ==> " + case_local_path)

        # Skip files already registered as derived on a previous run.
        already_added = self.check_derived_existance(
            dataSource, name, dir_abstract_file_info.parentPath)
        if already_added:
            continue

        # Arguments: file name, local path, size, ctime, crtime, atime,
        # mtime, isFile, parent file, rederive details, tool name,
        # tool version, other details, encoding type.
        new_derived = file_manager.addDerivedFile(
            name, case_local_path, os.path.getsize(source_path),
            + 0, 0, 0, 0, True, dir_abstract_file_info,
            "", "Volatility", self.Volatility_Version, "",
            TskData.EncodingType.NONE)
        IngestServices.getInstance().fireModuleContentEvent(
            ModuleContentEvent(new_derived))
def process(self, dataSource, progressBar):
    """Write a "<md5>\\t<name>" hashset file for this data source into the export directory."""
    # Work amount is unknown up front.
    progressBar.switchToIndeterminate()

    currentCase = Case.getCurrentCase()
    baseName = os.path.basename(dataSource.getPaths()[0])
    hashsetPath = os.path.join(currentCase.getExportDirectory(),
                               str(baseName) + "_hashset.txt")

    # Pull every file row that has a computed MD5 from the case database.
    query = currentCase.getSleuthkitCase().executeQuery(
        'select name, md5 from tsk_files where md5 <> "";')
    rows = query.getResultSet()
    with open(hashsetPath, 'w') as out:
        while rows.next():
            out.write("%s\t%s\n" % (rows.getString("md5"),
                                    rows.getString("name")))
    query.close()

    # Announce completion in the ingest inbox.
    note = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Create_DS_Hashset",
        " Hashset Create For Datasource " + baseName)
    IngestServices.getInstance().postMessage(note)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Flag, index, and count every file whose name contains "test"."""
    # Work amount unknown until the query returns.
    progressBar.switchToIndeterminate()

    caseServices = Case.getCurrentCase().getServices()
    # Blackboard is used to index new artifacts for keyword search.
    blackboard = caseServices.getBlackboard()
    fileManager = caseServices.getFileManager()

    # Every file with "test" in its name.
    testFiles = fileManager.findFiles(dataSource, "%test%")
    numFiles = len(testFiles)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)

    fileCount = 0
    for testFile in testFiles:
        # Honour a mid-run cancel request.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "Processing file: " + testFile.getName())
        fileCount += 1

        # Generic interesting-file hit for each match.
        hit = testFile.newArtifact(
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        hit.addAttribute(BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
            SampleJythonDataSourceIngestModuleFactory.moduleName,
            "Test file"))
        try:
            # Make the artifact findable via keyword search.
            blackboard.indexArtifact(hit)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE,
                     "Error indexing artifact " + hit.getDisplayName())

        # Demonstration: stream the file and total its bytes.
        stream = ReadContentInputStream(testFile)
        chunk = jarray.zeros(1024, "b")
        totLen = 0
        readLen = stream.read(chunk)
        while readLen != -1:
            totLen += readLen
            readLen = stream.read(chunk)

        progressBar.progress(fileCount)

    # Summarise via the ingest inbox.
    note = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module",
        "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(note)
    return IngestModule.ProcessResult.OK
def process(self, file):
    """Flag allocated files larger than 10 MB whose size is a multiple of 4096 bytes."""
    # This will work in 4.0.1 and beyond:
    # blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Regular files only — skip unallocated/unused block pseudo-files.
    fileType = file.getType()
    if (fileType == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS or
            fileType == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS or
            not file.isFile()):
        return IngestModule.ProcessResult.OK

    # "Big and round": over 10 MB and an exact multiple of 4096.
    size = file.getSize()
    if size > 10485760 and size % 4096 == 0:
        # Generic interesting-file artifact.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            FindBigRoundFilesIngestModuleFactory.moduleName,
            "Big and Round Files")
        art.addAttribute(att)

        # Indexing (blackboard.indexArtifact) requires Autopsy 4.0.1+ and is
        # left disabled here, matching the comment block above.

        # Notify the UI that a new artifact exists.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Count and flag every file whose name contains "test" (java.util.logging variant)."""
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    logger = Logger.getLogger(SampleJythonDataSourceIngestModuleFactory.moduleName)

    # Work amount unknown until the query returns.
    progressBar.switchToIndeterminate()

    fileManager = Services(Case.getCurrentCase().getSleuthkitCase()).getFileManager()

    # All files with "test" in the name.
    matches = fileManager.findFiles(dataSource, "%test%")
    numFiles = len(matches)
    logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__,
                "process", "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)

    fileCount = 0
    for match in matches:
        # Honour a mid-run cancel request.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__,
                    "process", "Processing file: " + match.getName())
        fileCount += 1

        # Generic interesting-file artifact.
        hit = match.newArtifact(
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        hit.addAttribute(BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            SampleJythonDataSourceIngestModuleFactory.moduleName,
            "Test file"))

        # Demonstration: stream the file and total its bytes.
        stream = ReadContentInputStream(match)
        chunk = jarray.zeros(1024, "b")
        totLen = 0
        readLen = stream.read(chunk)
        while readLen != -1:
            totLen += readLen
            readLen = stream.read(chunk)

        progressBar.progress(fileCount)

    # Summarise in the ingest inbox.
    note = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                       "Sample Jython Data Source Ingest Module",
                                       "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(note)
    return IngestModule.ProcessResult.OK
def index_artifact(artifact, artifact_type):
    """Index *artifact* for keyword search, then broadcast a module-data event.

    Indexing failures are logged and swallowed so a search-index problem
    never blocks artifact posting; the event is fired regardless.
    """
    try:
        Case.getCurrentCase().getServices().getBlackboard().indexArtifact(artifact)
    except Exception:
        # Narrowed from a bare `except:` — still best-effort, but no longer
        # hides KeyboardInterrupt/SystemExit. Lazy %-style args avoid building
        # the message when the warning level is disabled.
        logging.warning("Error indexing artifact type: %s", artifact_type)
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent("Forensics Analyzer", artifact_type, None))
def process(self, file):
    """Flag Nintendo Switch screenshot/recording files and attach game metadata.

    Matches names of the form <16-digit timestamp>-<32-hex game hash>.{jpg,png,mp4},
    resolves the hash against the bundled game_hash_ids.json, and posts a
    custom TSK_ART_NS_SCREENSHOTS artifact (skipping files already flagged).
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    # Custom artifact/attribute types — assumed registered during startUp;
    # TODO confirm against the factory's startUp code (not visible here).
    artID_ns_ss = skCase.getArtifactType("TSK_ART_NS_SCREENSHOTS")
    artID_ns_ss_id = skCase.getArtifactTypeID("TSK_ART_NS_SCREENSHOTS")
    attID_ns_gid = skCase.getAttributeType("TSK_ATT_NS_GAME")
    attID_ns_ts = skCase.getAttributeType("TSK_ATT_NS_TIMESTAMP")

    # Skip non-files (unallocated/unused blocks, directories).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() is False)):
        return IngestModule.ProcessResult.OK

    # Flag files with .jpg/.mp4/.png in the name and make a blackboard artifact.
    if file.getName().lower().endswith(".jpg") or file.getName().lower().endswith(".mp4") or file.getName().lower().endswith(".png"):
        # Switch capture naming: 16 digits, dash, 32 hex chars, extension.
        if re.match(r"[0-9]{16}-[0-9a-fA-F]{32}\.(jpg|png|mp4)", file.getName()):
            self.log(Level.INFO, "Found a Switch screenshot: " + file.getName())
            self.filesFound += 1
            # Game-hash lookup table ships alongside this module file.
            self.path_to_data = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'game_hash_ids.json')
            if not os.path.exists(self.path_to_data):
                raise IngestModuleException("game_ids was not found in module folder")
            filename = file.getName().upper()
            # First dash-separated field is the capture timestamp
            # (YYYYMMDDHHMMSS + 2 extra digits consumed by %f).
            timestamp = filename.split("-")[0]
            parsed_ts = datetime.strptime(timestamp, "%Y%m%d%H%M%S%f").strftime('%H:%M %d/%m/%Y')
            # Second field (before the extension) is the game-id hash.
            gameID = filename.split("-")[1].split(".")[0]
            with open(self.path_to_data, "r") as data_file:
                gids = json.load(data_file)
            if gameID in gids:
                game = gids[gameID]
            else:
                game = "Unknown gameID"
            # Don't add to blackboard if the artifact already exists:
            # any prior artifact carrying a game attribute counts as a dupe.
            artifactList = file.getArtifacts(artID_ns_ss_id)
            for artifact in artifactList:
                dupe_test = artifact.getAttribute(attID_ns_gid)
                if dupe_test:
                    return IngestModule.ProcessResult.OK
            art = file.newArtifact(artID_ns_ss_id)
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                                 FindScreenshotsIngestModuleFactory.moduleName,
                                                 "Nintendo Switch - Screenshots"))
            art.addAttribute(BlackboardAttribute(attID_ns_gid,
                                                 FindScreenshotsIngestModuleFactory.moduleName, game))
            art.addAttribute(BlackboardAttribute(attID_ns_ts,
                                                 FindScreenshotsIngestModuleFactory.moduleName, parsed_ts))
            # Fire an event to notify the UI and others that there is a new artifact
            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(FindScreenshotsIngestModuleFactory.moduleName, artID_ns_ss, None))
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Export every file matching the configured extensions into per-extension folders.

    Creates <ExportDirectory>/Mass_Export/<ext>/ for each extension in
    self.extensionList and writes each matching file there as
    "<objectId>-<name>" (the object id prevents name collisions).
    Posts an inbox message when complete.
    """
    self.log(Level.INFO, "Starting to process")
    skCase = Case.getCurrentCase().getSleuthkitCase()  # currently unused
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Create the Mass_Export directory; if it already exists, continue on.
    exportDirectory = Case.getCurrentCase().getExportDirectory()
    exportDir = os.path.join(exportDirectory, "Mass_Export")
    self.log(Level.INFO, "create Directory " + exportDir)
    try:
        os.mkdir(exportDir)
    except OSError:
        self.log(Level.INFO, "Mass Export directory already exists" + exportDir)

    for fileExtension in self.extensionList:
        fileExt = fileExtension.strip()
        files = fileManager.findFiles(dataSource, "%." + fileExt)
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) +
                 " files for extension ==> " + str(fileExtension))

        # One sub-directory per extension.
        expDir = os.path.join(exportDir, fileExt)
        try:
            os.mkdir(expDir)
        except OSError:
            self.log(Level.INFO, "Directory already exists ==> " + str(expDir))

        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            extractedFile = os.path.join(
                expDir, str(file.getId()) + "-" + file.getName())
            try:
                ContentUtils.writeToFile(file, File(extractedFile))
            except Exception:
                # BUGFIX: this handler previously referenced the undefined
                # name 'temporaryDirectory', raising a NameError instead of
                # logging the failed path.
                self.log(Level.INFO, "Error writing File " + extractedFile)

    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Mass Export By Extension Complete", "Complete")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, file):
    """Scan a file's text for Luhn-valid payment card numbers and flag the file.

    Flags at most one TSK_INTERESTING_FILE_HIT per file, no matter how many
    candidate numbers it contains. Optionally skips binary content.
    Always returns IngestModule.ProcessResult.OK.
    """
    def luhnChecksumIsValid(cardNumber):
        # Luhn mod-10: from the right, double every second digit (subtracting
        # 9 when the doubling exceeds 9) and require total % 10 == 0.
        oddTotal = 0
        evenTotal = 0
        reversedCardNumber = cardNumber[::-1]
        oddDigits = reversedCardNumber[0::2]
        evenDigits = reversedCardNumber[1::2]
        for digit in oddDigits:
            oddTotal += int(digit)
        for digit in evenDigits:
            doubled = int(digit) * 2
            if doubled > 9:
                doubled -= 9
            evenTotal += doubled
        return (oddTotal + evenTotal) % 10 == 0

    # Skip non-files (unallocated/unused blocks, directories).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() == False)):
        return IngestModule.ProcessResult.OK

    inputStream = ReadContentInputStream(file)
    text = IOUtils.toString(inputStream, StandardCharsets.UTF_8)

    if self.skipBinaries:
        # A NUL byte is a cheap heuristic for binary content.
        if b'\x00' in text:
            return IngestModule.ProcessResult.OK

    # Candidate: leading digit 1-6, then 13-23 more digits with optional
    # space/dash separators. BUGFIX: raw strings so the regex escapes
    # (\d, \D) are explicit rather than relying on lenient string escapes.
    initialCCPattern = r'[1-6](?:\d[ -]*?){13,23}'
    possibleCCs = re.findall(initialCCPattern, text, re.IGNORECASE)

    self.fileFlagged = 0
    if possibleCCs:
        for cc in possibleCCs:
            # Strip every non-digit before validating.
            cc = re.sub(r"\D+", '', cc)
            if luhnChecksumIsValid(cc):
                if self.fileFlagged == 0:
                    self.filesFound += 1
                    # One generic interesting-file hit per flagged file.
                    art = file.newArtifact(
                        BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
                    att = BlackboardAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                        PaymentCardFileIngestModuleFactory.moduleName,
                        "Files With Possible Payment Card Numbers")
                    art.addAttribute(att)
                    IngestServices.getInstance().fireModuleDataEvent(
                        ModuleDataEvent(
                            PaymentCardFileIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))
                    self.fileFlagged = 1
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run the external media-analysis executable over all matching files.

    Collects every file matching ALLOWED_EXTENSIONS, launches the configured
    executable on a worker thread, and polls for user cancellation while it
    runs. Posts a summary message to the ingest inbox when done.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Query every file carrying one of the allowed media extensions.
    files = []
    for extension in ALLOWED_EXTENSIONS:
        try:
            files.extend(fileManager.findFiles(dataSource, "%" + extension))
        except TskCoreException:
            self.log(Level.INFO, "Error getting files from: '" + extension + "'")

    numFiles = len(files)
    # BUGFIX: was `if len(numFiles) == 0` (TypeError on an int), logged with
    # the nonexistent java.util.logging constant Level.ERROR, and returned
    # ProcessResult via the wrong owner class.
    if numFiles == 0:
        self.log(Level.SEVERE, "Didn't find any usable files! Terminating")
        return IngestModule.ProcessResult.OK
    self.log(Level.INFO, "Found " + str(numFiles) + " files")

    module_output_dir = Case.getCurrentCase().getModuleDirectory()
    # BUGFIX: was `output_dir`, an undefined name.
    module_dir = os.path.join(module_output_dir, dataSource.getName(), C_FDRI_DIR)

    # NOTE(review): `configFilePath` is not defined in this method — confirm
    # it is produced before launch (not visible in this chunk).
    # Worker thread runs the executable; this can block for a long time.
    executable_thread = Thread(
        target=lambda: self.thread_work(self.pathToExe, configFilePath))
    executable_thread.start()

    # Poll for cancellation once a second; interrupting the Java thread is
    # the most responsive way to abort the external process.
    while executable_thread.isAlive():
        if self.context.isJobCancelled():
            self.log(Level.INFO, "User cancelled job! Terminating thread")
            JThread.interrupt(executable_thread)
            self.log(Level.INFO, "Thread terminated")
            self.deleteFiles(module_dir)
            return IngestModule.ProcessResult.OK
        time.sleep(1)

    # Post a message to the ingest messages inbox.
    # BUGFIX: the summary previously referenced the undefined `fileCount`.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module",
        "Found %d files" % numFiles)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """YARA-scan every non-known .doc file and accumulate results in a case report.

    Each candidate file is exported to the case temp directory, scanned with
    the configured YARA executable/rules, and the output appended to
    Reports/YARA.txt, which is finally registered as a case report.
    """
    progressBar.switchToIndeterminate()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    ###---EDIT HERE---###
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    ###---EDIT HERE---###

    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    ###---EDIT HERE---###
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports", "YARA.txt")
    ###---EDIT HERE---###

    # BUGFIX: the report handle previously leaked when the job was cancelled
    # mid-run; a finally block now guarantees it is closed.
    reportHandle = open(reportPath, 'w')
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Skip hash-set "known" (uninteresting) files.
            if str(file.getKnown()) != "KNOWN":
                # Export under the object id to avoid name collisions.
                exportPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getId()) + "." + file.getNameExtension())
                ###---EDIT HERE---###
                ContentUtils.writeToFile(file, File(exportPath))
                # YARA writes its matches straight into the report file.
                subprocess.Popen(
                    [self.path_to_exe, self.path_to_rules, exportPath],
                    stdout=reportHandle).communicate()[0]
                ###---EDIT HERE---###
                reportHandle.write(file.getParentPath() + file.getName() + '\n\n')
            self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)
    finally:
        reportHandle.close()

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "YARA Scan",
                                          "Scanned %d Files" % numFiles)
    IngestServices.getInstance().postMessage(message)
    Case.getCurrentCase().addReport(reportPath, "YARA Scan",
                                    "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def index_artifact(self, blackboard, artifact, artifact_type):
    """Index *artifact* for keyword search, then broadcast a module-data event."""
    try:
        blackboard.indexArtifact(artifact)
    except Blackboard.BlackboardException as e:
        # Indexing problems are logged but never fatal.
        failure = "Error indexing artifact " + artifact.getDisplayName() + " " + str(e)
        self.log(Level.INFO, failure)
    # Notify the UI (and any listeners) that a new artifact of this type exists.
    event = ModuleDataEvent(
        NotificationAnalyzerDataSourceIngestModuleFactory.moduleName,
        artifact_type, None)
    IngestServices.getInstance().fireModuleDataEvent(event)
def process(self, file):
    """Flag .txt files with an interesting-file artifact, then demo artifact/content reads.

    Always returns IngestModule.ProcessResult.OK.
    """
    # Skip non-files (unallocated/unused block pseudo-files, directories).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() == False)):
        return IngestModule.ProcessResult.OK

    # This will work in 4.0.1 and beyond:
    # blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For an example, flag files with .txt in the name and make a blackboard
    # artifact.
    if file.getName().lower().endswith(".txt"):
        self.log(Level.INFO, "Found a text file: " + file.getName())
        self.filesFound += 1

        # Generic interesting-file artifact; see developer docs for others.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonFileIngestModuleFactory.moduleName,
                                  "Text Files")
        art.addAttribute(att)

        # This will work in 4.0.1 and beyond:
        # try:
        #     blackboard.indexArtifact(art)   # index for keyword search
        # except Blackboard.BlackboardException as e:
        #     self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI that there is a new artifact.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(SampleJythonFileIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))

    # Example only: enumerate interesting-file artifacts added by any module
    # and log their attributes.
    artifactList = file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
    for artifact in artifactList:
        attributeList = artifact.getAttributes()
        for attrib in attributeList:
            self.log(Level.INFO, attrib.toString())

    # Example only: read the file's contents and count the bytes.
    # BUGFIX: the read length was previously bound to 'len', shadowing the
    # builtin; renamed to readLen. Also dropped stray semicolons.
    inputStream = ReadContentInputStream(file)
    buffer = jarray.zeros(1024, "b")
    totLen = 0
    readLen = inputStream.read(buffer)
    while readLen != -1:
        totLen = totLen + readLen
        readLen = inputStream.read(buffer)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract RingCentral meeting chat logs (*.txt) and parse each one."""
    # Work amount unknown until the query returns.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    chatFiles = fileManager.findFiles(dataSource, "%.txt",
                                      "/Documents/RingCentral/Meetings")
    numFiles = len(chatFiles)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Working directory under the case temp dir; reuse it if already present.
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                      "RingCentral")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # Extract and parse each meeting chat file; slack exports are skipped.
    for chatFile in chatFiles:
        if "-slack" in chatFile.getName():
            continue
        # Honour a mid-run cancel request.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1

        # Save locally, keyed by object id to avoid name collisions.
        extractedFile = os.path.join(
            temporaryDirectory, str(chatFile.getId()) + "-" + chatFile.getName())
        ContentUtils.writeToFile(chatFile, File(extractedFile))
        self.chatMeetingLogs(extractedFile, chatFile)
        try:
            os.remove(extractedFile)
        except:
            self.log(Level.INFO, "Failed to remove file " + extractedFile)

    # Post completion notice to the ingest inbox.
    note = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                       "CentralRing",
                                       " CentralRing Has Been Analyzed ")
    IngestServices.getInstance().postMessage(note)
    return IngestModule.ProcessResult.OK
def process(self, file):
    """File ingest entry point: look for the Nintendo Switch display
    settings file ("80000000000000d1"), extract EDID monitor-name
    strings from its raw bytes with a regex, and record each distinct
    TV name as a custom connected-display artifact.
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    ARTID_NS_TV = skCase.getArtifactTypeID(self.ARTIFACTTYPENAME_NS_TV)
    names = []
    # NOTE(review): skCase is fetched again here; the second lookup is
    # redundant but harmless.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    # Skip unallocated/unused blocks and anything that is not a regular file.
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() is False)):
        return IngestModule.ProcessResult.OK
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    if file.getName() == "80000000000000d1":
        # Existing artifacts of our custom type, used below for de-duplication.
        artifactList = file.getArtifacts(ARTID_NS_TV)
        self.log(Level.INFO, "Found the file" + file.getName())
        self.filesFound += 1
        # Read the whole file in 2 KB chunks; repr() turns the raw bytes
        # into escaped text ("\xfc" etc.) that the regex below matches.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(2048, "b")
        totLen = 0
        lengthofbuffer = inputStream.read(buffer)
        while lengthofbuffer != -1:
            totLen = totLen + lengthofbuffer
            lengthofbuffer = inputStream.read(buffer)
            currentBuffer = buffer.tostring()
            # EDID descriptor tag fc 00 precedes the monitor name; capture
            # up to the newline terminator between Edid block markers.
            names = names + re.findall("EdidBlock.*?\\\\xfc\\\\x00(.*?)\\\\n.*?EdidExtensionBlock", repr(currentBuffer))
        noduplicatesnames = list(set(names))
        for tvname in noduplicatesnames:
            # Don't add to blackboard if the artifact already exists.
            # NOTE(review): this returns from the whole method on the first
            # duplicate, which also skips any remaining new names -- confirm
            # that is intended.
            for artifact in artifactList:
                artifactName = artifact.getAttribute(self.NS_DISPLAY_ATTRIBUTES["Name"][3])
                if artifactName.getValueString() == tvname:
                    return IngestModule.ProcessResult.OK
            art = file.newArtifact(ARTID_NS_TV)
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                                 ConnectedDisplayIngestModuleFactory.moduleName,
                                                 "Nintendo Switch - Connected TV"))
            # One attribute per configured display attribute type (index 3
            # holds the resolved attribute type object).
            for attribute in self.NS_DISPLAY_ATTRIBUTES.keys():
                art.addAttribute(BlackboardAttribute(self.NS_DISPLAY_ATTRIBUTES[attribute][3],
                                                     ConnectedDisplayIngestModuleFactory.moduleName,
                                                     str(tvname)))
            try:
                # Index the artifact for keyword search.
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException:
                self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())
        # Fire an event to notify the UI and others that there is a new artifact.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ConnectedDisplayIngestModuleFactory.moduleName,
                            skCase.getArtifactType(self.ARTIFACTTYPENAME_NS_TV), None))
    return IngestModule.ProcessResult.OK
def addInterestingFileHitAttribute(artifact, moduleName, attributeName):
    """Tag *artifact* with a TSK_SET_NAME attribute and announce the
    interesting-file hit to the UI.

    artifact      -- blackboard artifact to decorate
    moduleName    -- name of the module credited as the attribute source
    attributeName -- value stored in the set-name attribute
    """
    setNameAttribute = BlackboardAttribute(
        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
        moduleName,
        attributeName)
    artifact.addAttribute(setNameAttribute)

    # Let the UI (and other listeners) know new artifact data exists.
    newDataEvent = ModuleDataEvent(
        moduleName,
        BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
        None)
    IngestServices.getInstance().fireModuleDataEvent(newDataEvent)
def process(self, file):
    """File ingest entry point: for each .mp4 file, run the bundled
    parse_mp4 helper binary to extract embedded BlackVue GPS data and
    post one TSK_GPS_TRACKPOINT artifact per location.
    """
    # Skip non-files
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (not file.isFile())):
        return IngestModule.ProcessResult.OK

    def getBlackboardAtt(label, value):
        # Build a standard-typed attribute credited to this module.
        return BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.fromLabel(
                label).getTypeID(), GeolocationBlackvue.moduleName, value)

    if file.getName().lower().endswith(".mp4"):
        self.log(
            Level.INFO,
            "Found a mp4 file, possibly a BlackVue dashcam recording: " +
            file.getName())

        # BUG FIX: hasattr(platform, 'win32_ver') is True on *every* OS
        # (the function always exists), so '.exe' was appended
        # unconditionally. Use the actual platform name instead.
        platform_suffix = '.exe' if platform.system() == 'Windows' else ''

        # Copy the file contents into a temporary file the helper can read.
        filesize = file.getSize()
        buffer = jarray.zeros(filesize, 'b')
        file.read(buffer, 0, filesize)
        file.close()
        temporary = tempfile.NamedTemporaryFile(delete=False)
        try:
            temporary.write(buffer)
            # BUG FIX: flush/close before the child process opens the file;
            # without this the helper could see a partially-written file
            # (and on Windows could not open it at all while we hold it).
            temporary.close()

            # Call our "binary" and supply our temporary file.
            # BUG FIX: pass an argument list -- a single space-joined string
            # without shell=True is treated as the program name on POSIX.
            # TODO pipe our file in instead of making a temporary copy
            executable = os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                'dist', 'parse_mp4') + platform_suffix
            output = subprocess.check_output([executable, temporary.name])
        finally:
            # Remove the temp copy (delete=False means nobody else will).
            os.unlink(temporary.name)

        # Helper emits JSON: a list of [unix_time, latitude, longitude].
        locations = json.loads(output)
        for unix, lat, lon in locations:
            art = file.newArtifact(
                BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
            latAttribute = getBlackboardAtt("TSK_GEO_LATITUDE", lat)
            lonAttribute = getBlackboardAtt("TSK_GEO_LONGITUDE", lon)
            art.addAttributes([latAttribute, lonAttribute])

        # Notify the UI that new trackpoint artifacts exist.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(
                GeolocationBlackvue.moduleName,
                BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT,
                None))
    return IngestModule.ProcessResult.OK
def add_Volatility_Dump_dir(self, dataSource, dir_abstract_file_info, dump_dir, dir_name, local_dir):
    """Register a Volatility dump directory as a derived file in the case
    database (if not already present) and return its AbstractFile.

    dataSource             -- data source the derived file belongs to
    dir_abstract_file_info -- parent abstract file of the dump directory
    dump_dir               -- on-disk directory holding the dump
    dir_name               -- name of the dump directory entry
    local_dir              -- local path recorded for the derived file
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    self.log(Level.INFO, " dir Name is ==> " + dir_name)
    self.log(
        Level.INFO,
        " abstract parentPath is ==> " + str(dir_abstract_file_info.parentPath))
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)

    dev_file = os.path.join(dump_dir, dir_name)
    local_file = os.path.join(local_dir, dir_name)

    if not (self.check_derived_existance(
            dataSource, dir_name, dir_abstract_file_info.parentPath)):
        # Add derived file
        # Parameters Are:
        #   File Name, Local Path, size, ctime, crtime, atime, mtime, isFile,
        #   Parent File, rederive Details, Tool Name, Tool Version,
        #   Other Details, Encoding Type
        # BUG FIX: removed a stray "+" typo that turned the ctime argument
        # into a unary-plus expression.
        derived_file = skCase.addDerivedFile(dir_name, local_file, os.path.getsize(dev_file),
                                             0, 0, 0, 0, True, dir_abstract_file_info, "",
                                             "Volatility", self.Volatility_Version, "",
                                             TskData.EncodingType.NONE)
        # Announce the new content so the UI picks it up.
        IngestServices.getInstance().fireModuleContentEvent(
            ModuleContentEvent(derived_file))
    else:
        pass

    # Look the (possibly pre-existing) derived file back up by name/path.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    new_derived_file = fileManager.findFiles(
        dataSource, dir_name, dir_abstract_file_info.parentPath)
    numFiles = len(new_derived_file)
    self.log(Level.INFO, " print number of files is " + str(numFiles))

    # Prefer the match whose recorded local path equals ours.
    for file in new_derived_file:
        self.log(Level.INFO, "File Exists ==> " + str(file))
        self.log(Level.INFO, "Local Directory ==> " + str(file.localPath))
        self.log(Level.INFO, "Local Directory ==> " + local_file)
        if local_file == file.localPath:
            self.log(Level.INFO, "File Exists ==> " + str(file))
            return file

    # Fall back to the first match.
    # NOTE(review): raises IndexError if findFiles returned nothing --
    # presumably impossible right after adding the file; confirm.
    self.log(Level.INFO, "File Exists2 ==> " + str(new_derived_file[0]))
    return new_derived_file[0]
def addAttachments(self, attachments, skCase, dataSource, bbArtifact):
    """Register each extracted e-mail attachment as a derived file of
    *bbArtifact* and announce it to the UI.

    List-typed entries in *attachments* are skipped; only plain path
    strings are processed.
    """
    self.log(Level.INFO, "Attachments to add ==> " + str(attachments))
    for attachment in attachments:
        if type(attachment) == list:
            continue
        fileName = os.path.basename(attachment)
        # Hard Coded path should be changed to being passed in
        relativeFileName = os.path.join(
            os.path.join('ModuleOutput', 'Email-Eml'), fileName)
        # Add derived file
        # Parameters Are:
        #   File Name, Local Rel Path, size, ctime, crtime, atime, mtime,
        #   isFile, Parent File, rederive Details, Tool Name, Tool Version,
        #   Other Details, Encoding Type
        derived_file = skCase.addDerivedFile(
            fileName,
            relativeFileName,
            os.path.getsize(attachment),
            0, 0, 0, 0,
            True,
            bbArtifact,
            "",
            ProcessEmlEmailIngestModuleFactory.moduleName,
            "1.0",
            "",
            TskData.EncodingType.NONE)
        IngestServices.getInstance().fireModuleContentEvent(
            ModuleContentEvent(derived_file))
def addInterestingFileHitAttributes(artifact, moduleName, attributesDict):
    """Add one blackboard attribute per entry of *attributesDict* to
    *artifact*, then announce the interesting-file hit to the UI.

    attributesDict maps attribute value (the dict key) to the attribute
    type object (the dict value) whose getTypeID() is used.
    """
    # Iterate the dict directly instead of indexing freshly-built lists
    # by position on every pass (the old range(len(...)) loop rebuilt
    # list(attributesDict.values()) and list(attributesDict) each turn).
    # keys()/values() ordering is guaranteed consistent, so the pairing
    # is identical.
    attributesList = []
    for attributeValue, attributeType in attributesDict.items():
        attributesList.append(BlackboardAttribute(
            attributeType.getTypeID(), moduleName, attributeValue))
    artifact.addAttributes(attributesList)

    # Fire an event to notify the UI and others that there is a new artifact.
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(
            moduleName,
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
            None))
def process(self, dataSource, progressBar):
    """Data-source ingest entry point: run the configured YARA binary
    against every non-known matching file and collect its stdout into a
    case report (Reports/YARA.txt).
    """
    progressBar.switchToIndeterminate()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    ###---EDIT HERE---###
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    ###---EDIT HERE---###
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    ###---EDIT HERE---###
    reportPath = os.path.join(
        Case.getCurrentCase().getCaseDirectory(), "Reports", "YARA.txt")
    ###---EDIT HERE---###
    reportHandle = open(reportPath, 'w')
    # BUG FIX: close the report handle on *every* exit path -- the old
    # code leaked it (and lost buffered output) when the job was
    # cancelled mid-scan.
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Skip hash-set "known" (NSRL) files.
            if (str(file.getKnown()) != "KNOWN"):
                exportPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getId()) + "." + file.getNameExtension())
                ###---EDIT HERE---###
                ContentUtils.writeToFile(file, File(exportPath))
                # YARA's stdout goes straight into the report file.
                subprocess.Popen([self.path_to_exe, self.path_to_rules, exportPath],
                                 stdout=reportHandle).communicate()
                ###---EDIT HERE---###
                reportHandle.write(file.getParentPath() + file.getName() + '\n\n')
                self.log(Level.INFO, "Processing file: " + file.getName())
                fileCount += 1
                progressBar.progress(fileCount)
    finally:
        reportHandle.close()

    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "YARA Scan",
        "Scanned %d Files" % numFiles)
    IngestServices.getInstance().postMessage(message)
    Case.getCurrentCase().addReport(reportPath, "YARA Scan",
                                    "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def shutDown(self):
    """Ingest-thread teardown: report how many game-history files this
    thread found via the ingest inbox."""
    summary = str(self.filesFound) + " recent game history found"
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        GameHistoryIngestModuleFactory.moduleName,
        summary)
    ingestServices = IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Data-source ingest entry point (scaffolding): logs progress
    markers and posts a completion message; no analysis yet."""
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    self.log(Level.INFO, "Starting 2 to process, Just before call to ???????")
    self.log(Level.INFO, "ending process, Just before call to ??????")

    # After all databases, post a message to the ingest messages in box.
    completionMessage = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "GUI_Test_SQLSettings",
        " GUI_Test_SQLSettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(completionMessage)

    return IngestModule.ProcessResult.OK
def postInformationForTheUser(self, moduleName, levelType, messageType, sentence):
    """Log *sentence* at *levelType* and also post it to the ingest
    inbox under *moduleName* with the given *messageType*."""
    self.log(levelType, sentence)
    inboxMessage = IngestMessage.createMessage(messageType, moduleName, sentence)
    ingestServices = IngestServices.getInstance().postMessage(inboxMessage)
def process(self, file):
    """File ingest entry point: flag files whose name contains "test"
    as interesting hits, then read the file and post its size to the
    ingest inbox.
    """
    # If the file has "test" in its name, post an artifact to the blackboard.
    if file.getName().find("test") != -1:
        art = file.newArtifact(
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            "Sample Jython File Ingest Module", "Text Files")
        art.addAttribute(att)

        # Read the contents of the file in 1 KB chunks to total its size.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        # BUG FIX: renamed the loop variable from "len", which shadowed
        # the builtin len() inside this function.
        bytesRead = inputStream.read(buffer)
        while (bytesRead != -1):
            totLen = totLen + bytesRead
            bytesRead = inputStream.read(buffer)

        # Send the size of the file to the ingest messages inbox.
        msgText = "Size of %s is %d bytes" % (file.getName(), totLen)
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA,
            "Sample Jython File IngestModule", msgText)
        ingestServices = IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def shutDown(self):
    """Ingest-thread teardown: post the per-thread file count to the
    ingest inbox."""
    summary = str(self.filesFound) + " files found"
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        ExportAllImagesVideoesAudioFactory.moduleName,
        summary)
    ingestServices = IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Data-source ingest entry point (scaffolding): logs progress
    markers and posts a completion message; no analysis yet."""
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    self.log(Level.INFO, "Starting 2 to process, Just before call to ???????")
    self.log(Level.INFO, "ending process, Just before call to ??????")

    # After all databases, post a message to the ingest messages in box.
    doneMessage = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "GUI_Test",
        " GUI_Test Has Been Analyzed ")
    IngestServices.getInstance().postMessage(doneMessage)

    return IngestModule.ProcessResult.OK
def process(self, file):
    """File ingest entry point: record the Nintendo Switch last-boot
    timestamp from the system save file "8000000000000060" as a custom
    blackboard artifact (once per file).
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    # Custom artifact/attribute types; presumably registered elsewhere
    # (e.g. at module startup) -- here we only look them up.
    artID_ns_lboot = skCase.getArtifactType("TSK_ART_NS_LBOOT")
    artID_ns_lboot_id = skCase.getArtifactTypeID("TSK_ART_NS_LBOOT")
    attID_ns_lboot = skCase.getAttributeType("TSK_ATT_NS_LBOOT")
    # Skip unallocated/unused blocks and anything that is not a regular file.
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() is False)):
        return IngestModule.ProcessResult.OK
    if file.getName().upper() == "8000000000000060":
        self.log(Level.INFO, "Found a Bootup timestamp: " + file.getName())
        self.filesFound += 1
        # The file's modification time is taken as the last boot time
        # (getMtimeAsDate returns a formatted date string).
        timestamp = file.getMtimeAsDate()
        # Lets not add to blackboard if the artifact already exists.
        artifactList = file.getArtifacts(artID_ns_lboot_id)
        for artifact in artifactList:
            dupe_test = artifact.getAttribute(attID_ns_lboot)
            if dupe_test:
                return IngestModule.ProcessResult.OK
        art = file.newArtifact(artID_ns_lboot_id)
        art.addAttribute(
            BlackboardAttribute(
                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(
                ), FindLastBootIngestModuleFactory.moduleName,
                "Nintendo Switch - Last Boot Time"))
        art.addAttribute(
            BlackboardAttribute(attID_ns_lboot,
                                FindLastBootIngestModuleFactory.moduleName,
                                timestamp))
        # Fire an event to notify the UI and others that there is a new artifact
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(FindLastBootIngestModuleFactory.moduleName,
                            artID_ns_lboot, None))
    return IngestModule.ProcessResult.OK
def shutDown(self):
    """Ingest-thread teardown: if any suspect files were found, post a
    summary to the ingest inbox (silent otherwise)."""
    if not self.filesFound:
        return
    summary = (str(self.filesFound) +
               " file(s) found with possible payment card numbers")
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        PaymentCardFileIngestModuleFactory.moduleName,
        summary)
    ingestServices = IngestServices.getInstance().postMessage(message)
def shutDown(self):
    """Ingest-thread teardown: report the user count to the ingest
    inbox, then delete the module's temp directory."""
    summary = str(self.filesFound) + " users found"
    _ = IngestServices.getInstance().postMessage(
        IngestMessage.createMessage(
            IngestMessage.MessageType.DATA,
            MpUserHistoryIngestModuleFactory.moduleName,
            summary))

    # Remove temp dir after use.
    if os.path.exists(self.tmp_path):
        shutil.rmtree(self.tmp_path)
def process(self, dataSource, progressBar):
    """Data-source ingest entry point: run metadata extraction once per
    supported document extension, then post the total count."""
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # One startModule pass per supported document type, in the same
    # order as before.
    for extension in ("pdf", "docx", "pptx", "xlsx", "doc", "ppt", "xls"):
        self.startModule(extension, skCase, fileManager, dataSource, progressBar)

    # Summarize how many documents were handled across all passes.
    IngestServices.getInstance().postMessage(
        IngestMessage.createMessage(
            IngestMessage.MessageType.DATA,
            "DocumentMetadataParser",
            "Found %d files" % self.totalCount))

    return IngestModule.ProcessResult.OK
def add_Volatility_Dump_dir(self, dataSource, dir_abstract_file_info, dump_dir, dir_name, local_dir):
    """Register a Volatility dump directory as a derived file in the case
    database (if not already present) and return its AbstractFile.

    dataSource             -- data source the derived file belongs to
    dir_abstract_file_info -- parent abstract file of the dump directory
    dump_dir               -- on-disk directory holding the dump
    dir_name               -- name of the dump directory entry
    local_dir              -- local path recorded for the derived file
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    self.log(Level.INFO, " dir Name is ==> " + dir_name)
    self.log(Level.INFO, " abstract parentPath is ==> " + str(dir_abstract_file_info.parentPath))
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)

    dev_file = os.path.join(dump_dir, dir_name)
    local_file = os.path.join(local_dir, dir_name)

    if not(self.check_derived_existance(dataSource, dir_name, dir_abstract_file_info.parentPath)):
        # Add derived file
        # Parameters Are:
        #   File Name, Local Path, size, ctime, crtime, atime, mtime, isFile,
        #   Parent File, rederive Details, Tool Name, Tool Version,
        #   Other Details, Encoding Type
        # BUG FIX: removed a stray "+" typo that turned the ctime argument
        # into a unary-plus expression.
        derived_file = skCase.addDerivedFile(dir_name, local_file, os.path.getsize(dev_file),
                                             0, 0, 0, 0, True, dir_abstract_file_info, "",
                                             "Volatility", self.Volatility_Version, "",
                                             TskData.EncodingType.NONE)
        # Announce the new content so the UI picks it up.
        IngestServices.getInstance().fireModuleContentEvent(ModuleContentEvent(derived_file))
    else:
        pass

    # Look the (possibly pre-existing) derived file back up by name/path.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    new_derived_file = fileManager.findFiles(dataSource, dir_name, dir_abstract_file_info.parentPath)
    numFiles = len(new_derived_file)
    self.log(Level.INFO, " print number of files is " + str(numFiles))

    # Prefer the match whose recorded local path equals ours.
    for file in new_derived_file:
        self.log(Level.INFO, "File Exists ==> " + str(file))
        self.log(Level.INFO, "Local Directory ==> " + str(file.localPath))
        self.log(Level.INFO, "Local Directory ==> " + local_file)
        if local_file == file.localPath:
            self.log(Level.INFO, "File Exists ==> " + str(file))
            return file

    # Fall back to the first match.
    # NOTE(review): raises IndexError if findFiles returned nothing --
    # presumably impossible right after adding the file; confirm.
    self.log(Level.INFO, "File Exists2 ==> " + str(new_derived_file[0]))
    return new_derived_file[0]
def shutDown(self):
    """Ingest-thread teardown: write the collected geodata (plus a
    commented-out report of unconfirmed coordinates) to a pretty-printed
    XML file, attach it to the case as a report, and post a summary to
    the ingest inbox.
    """
    # Encode the per-category counts into the report file name.
    self.xmlname += "_" + str(self.picFound) + str(self.dbFound) + str(self.jsonFound) + str(self.filesFound) + "_androidgeodata.xml"

    # BUG FIX: use a context manager so the file handle is closed even
    # if serialization raises (the old code leaked it in that case).
    with open(self.xmlname, 'w') as report:
        report.write(
            str(xml.dom.minidom.parseString(et.tostring(self.root)).toprettyxml()) +
            " \n <!-- Report of possible other coordinates --> \n <!--" +
            str(xml.dom.minidom.parseString(et.tostring(self.root_report)).toprettyxml()) +
            "-->"
        )

    Case.getCurrentCase().addReport(self.xmlname,
                                    AndroidGeodataCrawlerFactory.moduleName,
                                    "AndroidGeodata XML")

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          AndroidGeodataCrawlerFactory.moduleName,
                                          "In this thread: " + str(self.filesFound) + " files found, " +
                                          str(self.picFound) + " pictures, " +
                                          str(self.dbFound) + " DBs and " +
                                          str(self.jsonFound) + " json processed. "
                                          "\n A xml (" + self.xmlname + ") and a report have been created ")
    IngestServices.getInstance().postMessage(message)
def create_an_artifact(self, blackboard, file, title):
    """Create an interesting-file hit on *file* labelled *title*, index
    it for keyword search, and notify the UI."""
    hit = file.newArtifact(
        BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
    hit.addAttribute(BlackboardAttribute(
        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
        AutopsyImageClassificationModuleFactory.moduleName,
        title))

    try:
        # Index the artifact for keyword search.
        blackboard.indexArtifact(hit)
    except Blackboard.BlackboardException as e:
        self.log(Level.SEVERE,
                 "Error indexing artifact " + hit.getDisplayName())

    # Fire an event to notify the UI and others that there is a new artifact.
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(
            AutopsyImageClassificationModuleFactory.moduleName,
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT))
def process(self, dataSource, progressBar):
    """Data-source ingest entry point: count files with "test" in the
    name plus files created in the last two weeks, then post the total
    to the ingest inbox."""
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Two units of work: the name query and the creation-time query.
    progressBar.switchToDeterminate(2)

    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    services = Services(sleuthkitCase)
    fileManager = services.getFileManager()

    # Task 1: count files with "test" in the name.
    files = fileManager.findFiles(dataSource, "%test%")
    fileCount = len(files)
    progressBar.progress(1)

    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Task 2: count files created within the last two weeks.
    currentTime = System.currentTimeMillis() / 1000
    minTime = currentTime - (14 * 24 * 60 * 60)
    otherFiles = sleuthkitCase.findAllFilesWhere("crtime > %d" % minTime)
    fileCount += len(otherFiles)
    progressBar.progress(1)

    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module",
        "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, file):
    """File ingest entry point: flag files larger than 10 MB whose size
    is an exact multiple of 4096 bytes as interesting ("big and round")."""
    # Skip unallocated/unused blocks and non-regular files.
    isSkippable = (
        file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS
        or file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS
        or file.isFile() == False)
    if isSkippable:
        return IngestModule.ProcessResult.OK

    # Look for files bigger than 10MB that are a multiple of 4096.
    size = file.getSize()
    if size > 10485760 and size % 4096 == 0:
        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a
        # generic type of artifact; see the developer docs for others.
        hit = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        hit.addAttribute(BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            FindBigRoundFilesIngestModuleFactory.moduleName,
            "Big and Round Files"))

        # Fire an event to notify the UI and others that there is a new artifact.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(
                FindBigRoundFilesIngestModuleFactory.moduleName,
                BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                None))

    return IngestModule.ProcessResult.OK
def process(self, file):
    """File ingest entry point: flag files whose name contains "test"
    as interesting hits, then read the file and post its size to the
    ingest inbox.
    """
    # If the file has "test" in its name, post an artifact to the blackboard.
    if file.getName().find("test") != -1:
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                  "Sample Jython File Ingest Module", "Text Files")
        art.addAttribute(att)

        # Read the contents of the file in 1 KB chunks to total its size.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        # BUG FIX: renamed the loop variable from "len", which shadowed
        # the builtin len() inside this function.
        bytesRead = inputStream.read(buffer)
        while (bytesRead != -1):
            totLen = totLen + bytesRead
            bytesRead = inputStream.read(buffer)

        # Send the size of the file to the ingest messages inbox.
        msgText = "Size of %s is %d bytes" % (file.getName(), totLen)
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "Sample Jython File IngestModule", msgText)
        ingestServices = IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, file):
    """File ingest entry point: flag well-known forensic artifact files
    (event logs, memory files, registry hives) as interesting-file hits
    under a matching set name.
    """
    # Skip unallocated/unused blocks and non-regular files.
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() == False)):
        return IngestModule.ProcessResult.OK

    def flag(setName):
        # Create one interesting-file hit on this file labelled *setName*
        # and notify the UI. Extracted because the original repeated this
        # stanza verbatim for each category.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        art.addAttribute(BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            FileMarkIngestModuleFactory.moduleName, setName))
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(FileMarkIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))

    name = file.getName()

    # Windows event logs.
    if file.getNameExtension() == "evtx":
        flag("Event Logs")

    # Memory images / swap files.
    if name in ("pagefile.sys", "hiberfil.sys", "MEMORY.DMP"):
        flag("Memory")

    # Registry hives and related caches.
    if name in ("SYSTEM", "SECURITY", "SOFTWARE", "SAM", "NTUSER.DAT",
                "UsrClass.dat", "RecentFileCache.bcf", "Amcache.hve"):
        flag("Registry")

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for Volume Shadow Copy extraction.

    Runs an external VSS-extraction executable over the image, reads the
    SQLite database it produces via JDBC, adds each extracted shadow
    volume as a new local-files data source, and posts one custom
    artifact per recovered file with its MFT number, path and MACB
    times.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();

    self.log(Level.INFO, "Starting Processing of Image")
    image_names = dataSource.getPaths()
    self.log(Level.INFO, "Image names ==> " + str(image_names[0]))
    # Only the first image path is handed to the extractor.
    image_name = str(image_names[0])

    # Create VSS directory in ModuleOutput directory, if it exists then continue on processing
    Mod_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    vss_output = os.path.join(Mod_Dir, "vss")
    try:
        os.mkdir(vss_output)
    except:
        self.log(Level.INFO, "Vss already exists " + Temp_Dir)

    # The external tool writes its results into this SQLite DB and error log.
    lclDbPath = os.path.join(vss_output, "vss_extract_info.db3")
    vss_error_log = os.path.join(vss_output, "bad_files.log")

    # Run the Processing/Extraction process
    self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_vss + " " + image_name + " " +
             lclDbPath + " " + vss_output + " " + vss_error_log)
    pipe = Popen([self.path_to_exe_vss, image_name, lclDbPath, vss_output, vss_error_log],
                 stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Register the custom attribute types; an exception here just means
    # they already exist from a previous run.
    try:
        attID_vs_fn = skCase.addArtifactAttributeType("TSK_VSS_MFT_NUMBER",
                                                      BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                                                      "MFT Number")
    except:
        self.log(Level.INFO, "Attributes Creation Error, MFT Number. ==> ")
    try:
        attID_vs_ct = skCase.addArtifactAttributeType("TSK_VSS_DATETIME_CHANGED",
                                                      BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
                                                      "Recovered Record")
    except:
        self.log(Level.INFO, "Attributes Creation Error, changed time. ==> ")
    try:
        attID_vs_sz = skCase.addArtifactAttributeType("TSK_VSS_FILE_SIZE",
                                                      BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG,
                                                      "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ")

    # Open the extractor's SQLite database through the JDBC driver.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # One row per shadow copy; VOL_NAME is a display label built from the
    # volume GUID and its creation time (Windows FILETIME seconds since
    # 1601, hence the 11644473600 epoch offset).
    try:
        stmt = dbConn.createStatement()
        SQL_Statement = "select ' - '||vss_identifier||' - '||DATETIME((SUBSTR(vss_create_dttm,1,11)-11644473600),'UNIXEPOCH') 'VOL_NAME', " + \
                        " vss_num, volume_id, vss_identifier from vss_info;"
        self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====")
        resultSet = stmt.executeQuery(SQL_Statement)
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Cycle through each row and create artifacts
    while resultSet.next():
        dir_list = []
        vss_identifier = resultSet.getString("vss_identifier")
        # Extractor numbers volumes from 1; the on-disk directories from 0.
        vss_num = int(resultSet.getString("vss_num")) - 1
        dir_list.append(vss_output + "\\vss" + str(vss_num))

        services = IngestServices.getInstance()
        progress_updater = ProgressUpdater()
        newDataSources = []

        # skCase = Case.getCurrentCase().getSleuthkitCase();
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        skcase_data = Case.getCurrentCase()

        # Get a Unique device id using uuid
        device_id = UUID.randomUUID()
        self.log(Level.INFO, "device id: ==> " + str(device_id))
        skcase_data.notifyAddingDataSource(device_id)

        # Add data source with files
        newDataSource = fileManager.addLocalFilesDataSource(str(device_id),
                                                            "vss" + str(vss_num) + resultSet.getString("VOL_NAME"),
                                                            "", dir_list, progress_updater)
        newDataSources.append(newDataSource.getRootDirectory())

        # Get the files that were added
        files_added = progress_updater.getFiles()
        #self.log(Level.INFO, "Fire Module1: ==> " + str(files_added))
        for file_added in files_added:
            # Notify listeners for each file that landed in the new data source.
            skcase_data.notifyDataSourceAdded(file_added, device_id)
            #self.log(Level.INFO, "Fire Module1: ==> " + str(file_added))
        #skcase.notifyDataSourceAdded(device_id)

        # NOTE(review): skCse and vss_fileManager are assigned but the code
        # below uses skCase/fileManager -- presumably leftovers; confirm.
        skCse = Case.getCurrentCase().getSleuthkitCase()
        vss_fileManager = Case.getCurrentCase().getServices().getFileManager()
        # The store file for this shadow copy lives under
        # "System Volume Information" and embeds the volume GUID.
        vss_files = fileManager.findFiles(dataSource, "%" + vss_identifier + "%",
                                          "System Volume Information")
        vss_numFiles = len(vss_files)
        #self.log(Level.INFO, "Number of VSS FIles is ==> " + str(vss_numFiles) + " <<= FIle Name is ++> " + str(vss_files))
        # NOTE(review): this loop body is a no-op ("in" here is a substring
        # test on "-slack"); slack files are actually filtered below.
        for vs in vss_files:
            if vs.getName() in "-slack":
                pass

        # One custom artifact type per shadow volume; fall back to lookup
        # when it already exists.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_vss = skCase.addArtifactType("TSK_VS_VOLUME_" + str(vss_num),
                                               "vss" + str(vss_num) + resultSet.getString("VOL_NAME") + " Files")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_vss = skCase.getArtifactTypeID("TSK_VS_VOLUME_" + str(vss_num))
        artID_vss = skCase.getArtifactTypeID("TSK_VS_VOLUME_" + str(vss_num))
        artID_vss_evt = skCase.getArtifactType("TSK_VS_VOLUME_" + str(vss_num))

        # Resolve all attribute types used on the per-file artifacts.
        attID_vs_fn = skCase.getAttributeType("TSK_VSS_MFT_NUMBER")
        attID_vs_ct = skCase.getAttributeType("TSK_VSS_DATETIME_CHANGED")
        attID_vs_sz = skCase.getAttributeType("TSK_VSS_FILE_SIZE")
        attID_vs_nm = skCase.getAttributeType("TSK_NAME")
        attID_vs_pa = skCase.getAttributeType("TSK_PATH")
        attID_vs_md = skCase.getAttributeType("TSK_DATETIME_MODIFIED")
        attID_vs_ad = skCase.getAttributeType("TSK_DATETIME_ACCESSED")
        attID_vs_cr = skCase.getAttributeType("TSK_DATETIME_CREATED")

        for vs_file in vss_files:
            if "-slack" in vs_file.getName():
                # Skip slack-space pseudo files.
                pass
            else:
                self.log(Level.INFO, "VSS FIles is ==> " + str(vs_file))
                # NOTE(review): the table name "vss1_diff" is hard-coded --
                # presumably it should vary with vss_num; confirm.
                try:
                    stmt_1 = dbConn.createStatement()
                    SQL_Statement_1 = "select file_name, inode, directory, ctime, mtime, atime, crtime, size " + \
                                      " from vss1_diff where lower(f_type) <> 'dir';"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====")
                    resultSet_1 = stmt_1.executeQuery(SQL_Statement_1)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for vss diff tables (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and create artifacts
                while resultSet_1.next():
                    try:
                        File_Name = resultSet_1.getString("file_name")
                        Path_Name = resultSet_1.getString("directory")
                        MFT_Number = resultSet_1.getString("inode")
                        Ctime = resultSet_1.getInt("ctime")
                        Mtime = resultSet_1.getInt("mtime")
                        Atime = resultSet_1.getInt("atime")
                        Crtime = resultSet_1.getInt("crtime")
                        File_Size = resultSet_1.getInt("size")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from vss diff table (" + e.getMessage() + ")")

                    # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields
                    # Make artifact for TSK_EVTX_LOGS
                    art = vs_file.newArtifact(artID_vss)
                    art.addAttributes(((BlackboardAttribute(attID_vs_nm, VSSIngesttModuleFactory.moduleName, File_Name)), \
                                       (BlackboardAttribute(attID_vs_fn, VSSIngesttModuleFactory.moduleName, MFT_Number)), \
                                       (BlackboardAttribute(attID_vs_pa, VSSIngesttModuleFactory.moduleName, Path_Name)), \
                                       (BlackboardAttribute(attID_vs_cr, VSSIngesttModuleFactory.moduleName, Crtime)), \
                                       (BlackboardAttribute(attID_vs_md, VSSIngesttModuleFactory.moduleName, Mtime)), \
                                       (BlackboardAttribute(attID_vs_ad, VSSIngesttModuleFactory.moduleName, Atime)), \
                                       (BlackboardAttribute(attID_vs_ct, VSSIngesttModuleFactory.moduleName, Ctime)),
                                       (BlackboardAttribute(attID_vs_sz, VSSIngesttModuleFactory.moduleName, File_Size))))

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(VSSIngesttModuleFactory.moduleName, artID_vss_evt, None))

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Process/Extract VS",
                                          " Volume Shadow has been analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
# ---------------------------------------------------------------------------
# Data-source ingest entry point for the Windows Prefetch module (Autopsy
# Jython).  NOTE(review): the original formatting is collapsed onto a few
# physical lines (some breaks fall inside string literals); the code below is
# kept byte-identical and only comments are added at safe boundaries.
#
# Visible flow: register the TSK_PREFETCH artifact type and the TSK_PF_* /
# TSK_PREFETCH_* attribute types (bare "except:" treats failure as "already
# exists" and re-fetches the type id); find every "%.pf" file in the data
# source; copy each into <case temp>/Prefetch_Files; run an external parser
# (self.path_to_exe -- assumed to be set elsewhere in this class, TODO
# confirm) that writes Autopsy_PF_DB.db3; read that SQLite DB over JDBC;
# post one TSK_PREFETCH artifact per row; clean up the temp files and post
# an ingest-inbox message.  Returns IngestModule.ProcessResult.OK.
# ---------------------------------------------------------------------------
def process(self, dataSource, progressBar): # Check to see if the artifacts exist and if not then create it, also check to see if the attributes # exist and if not then create them skCase = Case.getCurrentCase().getSleuthkitCase(); try: self.log(Level.INFO, "Begin Create New Artifacts") artID_pf = skCase.addArtifactType( "TSK_PREFETCH", "Windows Prefetch") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH") # Create the attribute type, if it exists then catch the error try: attID_pf_fn = skCase.addArtifactAttributeType("TSK_PREFETCH_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Prefetch File Name") except: self.log(Level.INFO, "Attributes Creation Error, Prefetch File Name. ==> ") try: attID_pf_an = skCase.addArtifactAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Actual File Name") except: self.log(Level.INFO, "Attributes Creation Error, Actual File Name. ==> ") try: attID_nr = skCase.addArtifactAttributeType("TSK_PF_RUN_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Program Number Runs") except: self.log(Level.INFO, "Attributes Creation Error, Program Number Runs. ==> ") try: attID_ex1 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_1", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 1") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 1. ==> ") try: attID_ex2 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_2", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 2") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 2. 
==> ") try: attID_ex3 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_3", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 3") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 3. ==> ") try: attID_ex4 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_4", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 4") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 4 ==> ") try: attID_ex5 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_5", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 5") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 5. ==> ") try: attID_ex6 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_6", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 6") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 6. ==> ") try: attID_ex7 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_7", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 7") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 7. 
==> ") try: attID_ex8 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_8", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 8") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 8 ==> ") self.log(Level.INFO, "Get Artifacts after they were created.") # Get the new artifacts and attributes that were just created artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH") artID_pf_evt = skCase.getArtifactType("TSK_PREFETCH") attID_pf_fn = skCase.getAttributeType("TSK_PREFETCH_FILE_NAME") attID_pf_an = skCase.getAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME") attID_nr = skCase.getAttributeType("TSK_PF_RUN_COUNT") attID_ex1 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_1") attID_ex2 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_2") attID_ex3 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_3") attID_ex4 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_4") attID_ex5 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_5") attID_ex6 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_6") attID_ex7 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_7") attID_ex8 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_8") # we don't know how much work there is yet progressBar.switchToIndeterminate() # Find the prefetch files and the layout.ini file from the /windows/prefetch folder fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%.pf") numFiles = len(files) self.log(Level.INFO, "found " + str(numFiles) + " files") progressBar.switchToDeterminate(numFiles) fileCount = 0; # Create prefetch directory in temp directory, if it exists then continue on processing Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "Prefetch_Files") self.log(Level.INFO, "create Directory " + Temp_Dir) try: os.mkdir(Temp_Dir) except: self.log(Level.INFO, "Prefetch Directory already exists " + Temp_Dir) # Write out each prefetch file to the temp directory for file in files: # Check if the user pressed cancel while we 
were busy if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK #self.log(Level.INFO, "Processing file: " + file.getName()) fileCount += 1 # Save the DB locally in the temp folder. use file id as name to reduce collisions lclDbPath = os.path.join(Temp_Dir, file.getName()) ContentUtils.writeToFile(file, File(lclDbPath)) # Run the EXE, saving output to a sqlite database self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + " Parm 2 ==> " + Case.getCurrentCase().getTempDirectory()) subprocess.Popen([self.path_to_exe, Temp_Dir, os.path.join(Temp_Dir, "Autopsy_PF_DB.db3")]).communicate()[0] # Set the database to be read to the once created by the prefetch parser program lclDbPath = os.path.join(Temp_Dir, "Autopsy_PF_DB.db3") self.log(Level.INFO, "Path the prefetch database file created ==> " + lclDbPath) # Open the DB using JDBC try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Query the contacts table in the database and get all columns. 
# NOTE(review): query the exe-produced prefetch DB; any SQLException while
# querying logs and returns OK (the ingest job is not failed).  Each result
# row is matched back to the original prefetch file via findFiles so the
# artifact is attached to the right AbstractFile.
try: stmt = dbConn.createStatement() resultSet = stmt.executeQuery("Select prefetch_File_Name, actual_File_Name, Number_time_file_run, " + " Embeded_date_Time_Unix_1, " + " Embeded_date_Time_Unix_2, " + " Embeded_date_Time_Unix_3, " + " Embeded_date_Time_Unix_4, " + " Embeded_date_Time_Unix_5, " + " Embeded_date_Time_Unix_6, " + " Embeded_date_Time_Unix_7, " + " Embeded_date_Time_Unix_8 " + " from prefetch_file_info ") except SQLException as e: self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Cycle through each row and create artifacts while resultSet.next(): try: self.log(Level.INFO, "Result (" + resultSet.getString("Prefetch_File_Name") + ")") Prefetch_File_Name = resultSet.getString("Prefetch_File_Name") Actual_File_Name = resultSet.getString("Actual_File_Name") Number_Of_Runs = resultSet.getString("Number_Time_File_Run") Time_1 = resultSet.getInt("Embeded_date_Time_Unix_1") Time_2 = resultSet.getInt("Embeded_date_Time_Unix_2") Time_3 = resultSet.getInt("Embeded_date_Time_Unix_3") Time_4 = resultSet.getInt("Embeded_date_Time_Unix_4") Time_5 = resultSet.getInt("Embeded_date_Time_Unix_5") Time_6 = resultSet.getInt("Embeded_date_Time_Unix_6") Time_7 = resultSet.getInt("Embeded_date_Time_Unix_7") Time_8 = resultSet.getInt("Embeded_date_Time_Unix_8") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, Prefetch_File_Name) for file in files: # Make artifact for TSK_PREFETCH, this can happen when custom attributes are fully supported #art = file.newArtifact(artID_pf) art = file.newArtifact(artID_pf) #self.log(Level.INFO, "Attribute Number ==>" + str(attID_pf_fn) + " " + str(attID_pf_an) ) # Add the attributes to the artifact. 
# Bulk-add all eleven prefetch attributes (name, mapped file, run count,
# eight embedded run timestamps) to the artifact in a single call.
art.addAttributes(((BlackboardAttribute(attID_pf_fn, ParsePrefetchDbIngestModuleFactory.moduleName, Prefetch_File_Name)), \ (BlackboardAttribute(attID_pf_an, ParsePrefetchDbIngestModuleFactory.moduleName, Actual_File_Name)), \ (BlackboardAttribute(attID_nr, ParsePrefetchDbIngestModuleFactory.moduleName, Number_Of_Runs)), \ (BlackboardAttribute(attID_ex1, ParsePrefetchDbIngestModuleFactory.moduleName, Time_1)), \ (BlackboardAttribute(attID_ex2, ParsePrefetchDbIngestModuleFactory.moduleName, Time_2)), \ (BlackboardAttribute(attID_ex3, ParsePrefetchDbIngestModuleFactory.moduleName, Time_3)), \ (BlackboardAttribute(attID_ex4, ParsePrefetchDbIngestModuleFactory.moduleName, Time_4)), \ (BlackboardAttribute(attID_ex5, ParsePrefetchDbIngestModuleFactory.moduleName, Time_5)), \ (BlackboardAttribute(attID_ex6, ParsePrefetchDbIngestModuleFactory.moduleName, Time_6)), \ (BlackboardAttribute(attID_ex7, ParsePrefetchDbIngestModuleFactory.moduleName, Time_7)), \ (BlackboardAttribute(attID_ex8, ParsePrefetchDbIngestModuleFactory.moduleName, Time_8)))) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName, artID_pf_evt, None)) # Clean up stmt.close() dbConn.close() os.remove(lclDbPath) #Clean up prefetch directory and files for file in files: try: os.remove(os.path.join(Temp_Dir, file.getName())) except: self.log(Level.INFO, "removal of prefetch file failed " + Temp_Dir + "\\" + file.getName()) try: os.rmdir(Temp_Dir) except: self.log(Level.INFO, "removal of prefetch directory failed " + Temp_Dir) # After all databases, post a message to the ingest messages in box. 
# Post a completion message to the ingest inbox and fire a final
# module-data event so the UI refreshes; always reports OK.
message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Prefetch Analyzer", " Prefetch Has Been Analyzed " ) IngestServices.getInstance().postMessage(message) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName, artID_pf_evt, None)) return IngestModule.ProcessResult.OK
# ---------------------------------------------------------------------------
# Data-source ingest entry point for the Plist parser module (Autopsy
# Jython).  NOTE(review): original formatting is collapsed (the L-breaks fall
# inside string literals and open call parentheses), so the code is kept
# byte-identical and only this header comment is added.
#
# Visible flow: for each filename pattern in self.List_Of_DBs (assumed set
# elsewhere in this class -- TODO confirm), find matching files, copy each to
# the case temp directory, and run an external converter (self.path_to_exe)
# that turns the plist into a SQLite DB ("Plist_File-<id>.db3").  If the
# converter output contains 'not a valid Plist' the file is recorded in
# message_desc and skipped.  Otherwise the DB is opened over JDBC, every
# table/view from SQLITE_MASTER is walked, and an artifact type
# "TSK_<filename>" plus "TSK_PLIST_<column>" attribute types are created
# dynamically from PRAGMA table_info (TEXT/LONGVARCHAR/BLOB/empty -> STRING,
# REAL/other -> LONG; LONGVARCHAR/BLOB values are stored as a "BLOBS Not
# Supported" placeholder string).  One artifact is created per data row.
# Temp files are removed per input file; a summary (including any failed
# plists) is posted to the ingest inbox.  Returns ProcessResult.OK.
# ---------------------------------------------------------------------------
def process(self, dataSource, progressBar): # we don't know how much work there is yet progressBar.switchToIndeterminate() # Set the database to be read to the once created by the prefetch parser program skCase = Case.getCurrentCase().getSleuthkitCase(); Temp_Dir = Case.getCurrentCase().getTempDirectory() fileManager = Case.getCurrentCase().getServices().getFileManager() message_desc = '' for Plist_Files in self.List_Of_DBs: files = fileManager.findFiles(dataSource, Plist_Files) numFiles = len(files) self.log(Level.INFO, "found " + str(numFiles) + " files") progressBar.switchToDeterminate(numFiles) fileCount = 0; for file in files: # Open the DB using JDBC #lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), SQLite_DB) lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId())) ContentUtils.writeToFile(file, File(lclDbPath)) # Run the EXE, saving output to a sqlite database self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "\\" + \ file.getName() + "-" + str(file.getId()) + " " + Temp_Dir + "\\Plist_File-" + str(file.getId()) + ".db3 ") pipe = Popen([self.path_to_exe, os.path.join(Temp_Dir, (file.getName() + "-" + str(file.getId()))), \ os.path.join(Temp_Dir, ("Plist_File-" + str(file.getId()) + ".db3"))], stdout=PIPE, stderr=PIPE) out_text = pipe.communicate()[0] self.log(Level.INFO, "Output from run is ==> " + out_text) if 'not a valid Plist' in out_text: message_desc = message_desc + "Error Parsing plist file " + file.getName() + ". 
File not parsed \n" else: extDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "Plist_File-" + str(file.getId()) + ".db3") #self.log(Level.INFO, "Path the sqlite database file created ==> " + lclDbPath) try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extDbPath) self.log(Level.INFO, "Database ==> " + file.getName()) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + extDbPath + " (" + e.getMessage() + ")") #return IngestModule.ProcessResult.OK # Query the contacts table in the database and get all columns. try: stmt = dbConn.createStatement() stmt2 = dbConn.createStatement() stmt3 = dbConn.createStatement() stmt4 = dbConn.createStatement() resultSet = stmt.executeQuery("Select tbl_name, type from SQLITE_MASTER where type in ('table','view');") #self.log(Level.INFO, "query SQLite Master table") #self.log(Level.INFO, "query " + str(resultSet)) # Cycle through each row and create artifacts while resultSet.next(): try: self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")") table_name = resultSet.getString("tbl_name") resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet.getString("tbl_name") + " ") # while resultSet4.next(): row_count = resultSet4.getInt("NumRows") self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ") if row_count >= 1: #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ") SQL_String_1 = "Select * from " + table_name + ";" SQL_String_2 = "PRAGMA table_info('" + table_name + "')" #self.log(Level.INFO, SQL_String_1) #self.log(Level.INFO, SQL_String_2) artifact_name = "TSK_" + file.getName() artifact_desc = "Plist " + file.getName() #self.log(Level.INFO, "Artifact Name ==> " + artifact_name + " Artifact Desc ==> " + artifact_desc) try: #self.log(Level.INFO, "Begin Create New Artifacts") artID_plist = skCase.addArtifactType( artifact_name, 
artifact_desc) except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_plist = skCase.getArtifactTypeID(artifact_name) artID_plist_evt = skCase.getArtifactType(artifact_name) Column_Names = [] Column_Types = [] resultSet2 = stmt2.executeQuery(SQL_String_2) while resultSet2.next(): Column_Names.append(resultSet2.getString("name").upper()) Column_Types.append(resultSet2.getString("type").upper()) attribute_name = "TSK_PLIST_" + resultSet2.getString("name").upper() #self.log(Level.INFO, "attribure id for " + attribute_name + " == " + resultSet2.getString("type").upper()) if resultSet2.getString("type").upper() == "TEXT": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "LONGVARCHAR": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "BLOB": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "REAL": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, 
resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") else: try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") resultSet3 = stmt3.executeQuery(SQL_String_1) while resultSet3.next(): art = file.newArtifact(artID_plist) Column_Number = 1 for col_name in Column_Names: #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ") #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ") #self.log(Level.INFO, "Result get information for column type " + Column_Types[Column_Number - 1] + " <== ") c_name = "TSK_PLIST_" + Column_Names[Column_Number - 1] #self.log(Level.INFO, "Attribute Name is " + c_name + " ") attID_ex1 = skCase.getAttributeType(c_name) if Column_Types[Column_Number - 1] == "TEXT": art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, resultSet3.getString(Column_Number))) elif Column_Types[Column_Number - 1] == "": art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, resultSet3.getString(Column_Number))) elif Column_Types[Column_Number - 1] == "LONGVARCHAR": art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file")) elif Column_Types[Column_Number - 1] == "BLOB": art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file")) elif Column_Types[Column_Number - 1] == "REAL": art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number)))) else: art.addAttribute(BlackboardAttribute(attID_ex1, 
ParsePlists2DBDelRecIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number)))) Column_Number = Column_Number + 1 IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParsePlists2DBDelRecIngestModuleFactory.moduleName, \ artID_plist_evt, None)) except SQLException as e: self.log(Level.INFO, "Error getting values from table " + resultSet.getString("tbl_name") + " (" + e.getMessage() + ")") except SQLException as e: self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")") #return IngestModule.ProcessResult.OK # Clean up stmt.close() dbConn.close() os.remove(os.path.join(Temp_Dir, "Plist_File-" + str(file.getId()) + ".db3")) os.remove(os.path.join(Temp_Dir, file.getName() + "-" + str(file.getId()))) # After all databases, post a message to the ingest messages in box. if len(message_desc) == 0: message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Plist Parser", " Plist files have been parsed " ) IngestServices.getInstance().postMessage(message) else: message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Plist Parser", message_desc + " Plist files have been parsed with the above files failing " ) IngestServices.getInstance().postMessage(message) return IngestModule.ProcessResult.OK
# ---------------------------------------------------------------------------
# Data-source ingest entry point for the JumpList Automatic-Destinations
# module (Autopsy Jython).  NOTE(review): original formatting is collapsed
# (several L-breaks fall inside string literals and statements), so the code
# is kept byte-identical and only this header comment is added.
#
# Visible flow: open a SleuthKit case transaction; register the TSK_JL_AD
# artifact type and the TSK_JLAD_* attribute types (bare "except:" means
# "already exists"; note the attribute named "TSK_JLAD_Drive Type" really
# contains a space, and the working-directory attribute is created into
# attID_jl_wc but later re-fetched as attID_jl_wd -- both resolve via
# getAttributeType, so behavior is consistent).  Find every
# "%.automaticDestinations-ms" file, copy each into <case temp>/JL_AD, run
# an external parser (self.path_to_exe with self.path_to_app_id_db --
# assumed set elsewhere, TODO confirm) producing JL_AD.db3, then for each
# file query Automatic_destinations_JL (SQL built by string concatenation
# of the file name -- NOTE(review): unparameterized, relies on file names
# being benign) and post one TSK_JL_AD artifact per row.  Finally commit
# the transaction, close/remove the DB, clean the temp dir, post an inbox
# message and fire a module-data event.  Returns ProcessResult.OK.
# ---------------------------------------------------------------------------
def process(self, dataSource, progressBar): # Check to see if the artifacts exist and if not then create it, also check to see if the attributes # exist and if not then create them skCase = Case.getCurrentCase().getSleuthkitCase(); skCase_Tran = skCase.beginTransaction() try: self.log(Level.INFO, "Begin Create New Artifacts") artID_jl_ad = skCase.addArtifactType( "TSK_JL_AD", "Jump List Auto Dest") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD") try: attID_jl_fn = skCase.addArtifactAttributeType("TSK_JLAD_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "JumpList File Name") except: self.log(Level.INFO, "Attributes Creation Error, JL AD File Name. ==> ") try: attID_jl_fg = skCase.addArtifactAttributeType("TSK_JLAD_FILE_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Description") except: self.log(Level.INFO, "Attributes Creation Error, File Description. ==> ") try: attID_jl_in = skCase.addArtifactAttributeType("TSK_JLAD_ITEM_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Item Name") except: self.log(Level.INFO, "Attributes Creation Error, Item Name. ==> ") try: attID_jl_cl = skCase.addArtifactAttributeType("TSK_JLAD_COMMAND_LINE_ARGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Command Line Args") except: self.log(Level.INFO, "Attributes Creation Error, Command Line Arguments. ==> ") try: attID_jl_dt = skCase.addArtifactAttributeType("TSK_JLAD_Drive Type", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Type") except: self.log(Level.INFO, "Attributes Creation Error, Drive Type. ==> ") try: attID_jl_dsn = skCase.addArtifactAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Serial Number") except: self.log(Level.INFO, "Attributes Creation Error, Drive Serial Number. 
==> ") try: attID_jl_des = skCase.addArtifactAttributeType("TSK_JLAD_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Description") except: self.log(Level.INFO, "Attributes Creation Error, Description. ==> ") try: attID_jl_evl = skCase.addArtifactAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Env Var Location") except: self.log(Level.INFO, "Attributes Creation Error, Env Var Location. ==> ") try: attID_jl_fat = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ACCESS_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Access Time") except: self.log(Level.INFO, "Attributes Creation Error, File Access Time. ==> ") try: attID_jl_faf = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Attribute Flags") except: self.log(Level.INFO, "Attributes Creation Error, File Attribute Flags. ==> ") try: attID_jl_fct = skCase.addArtifactAttributeType("TSK_JLAD_FILE_CREATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Creation Time") except: self.log(Level.INFO, "Attributes Creation Error, File Creation Time. ==> ") try: attID_jl_fmt = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Modification Time") except: self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ") try: attID_jl_fs = skCase.addArtifactAttributeType("TSK_JLAD_FILE_SIZE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Size") except: self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ") try: attID_jl_ic = skCase.addArtifactAttributeType("TSK_JLAD_ICON_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Icon Location") except: self.log(Level.INFO, "Attributes Creation Error, Icon Location. 
==> ") try: attID_jl_ltid = skCase.addArtifactAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Link Target Identifier Data") except: self.log(Level.INFO, "Attributes Creation Error, Link Target Identifier Data. ==> ") try: attID_jl_lp = skCase.addArtifactAttributeType("TSK_JLAD_LOCAL_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Local Path") except: self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ") try: attID_jl_mi = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Machine Identifier") except: self.log(Level.INFO, "Attributes Creation Error, Machine Identifier. ==> ") try: attID_jl_np = skCase.addArtifactAttributeType("TSK_JLAD_NETWORK_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Network Path") except: self.log(Level.INFO, "Attributes Creation Error, Network Path. ==> ") try: attID_jl_rp = skCase.addArtifactAttributeType("TSK_JLAD_RELATIVE_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Relative Path") except: self.log(Level.INFO, "Attributes Creation Error, Relative Path. ==> ") try: attID_jl_vl = skCase.addArtifactAttributeType("TSK_JLAD_VOLUME_LABEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Volume Label") except: self.log(Level.INFO, "Attributes Creation Error, Volume Label. ==> ") try: attID_jl_wc = skCase.addArtifactAttributeType("TSK_JLAD_WORKING_DIRECTORY", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Working Directory") except: self.log(Level.INFO, "Attributes Creation Error, Working Directory. 
==> ") #self.log(Level.INFO, "Get Artifacts after they were created.") # Get the new artifacts and attributes that were just created artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD") artID_jl_ad_evt = skCase.getArtifactType("TSK_JL_AD") attID_jl_fn = skCase.getAttributeType("TSK_JLAD_FILE_NAME") attID_jl_fg = skCase.getAttributeType("TSK_JLAD_FILE_DESCRIPTION") attID_jl_in = skCase.getAttributeType("TSK_JLAD_ITEM_NAME") attID_jl_cl = skCase.getAttributeType("TSK_JLAD_COMMAND_LINE_ARGS") attID_jl_dt = skCase.getAttributeType("TSK_JLAD_Drive Type") attID_jl_dsn = skCase.getAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER") attID_jl_des = skCase.getAttributeType("TSK_JLAD_DESCRIPTION") attID_jl_evl = skCase.getAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION") attID_jl_fat = skCase.getAttributeType("TSK_JLAD_FILE_ACCESS_TIME") attID_jl_faf = skCase.getAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS") attID_jl_fct = skCase.getAttributeType("TSK_JLAD_FILE_CREATION_TIME") attID_jl_fmt = skCase.getAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME") attID_jl_fs = skCase.getAttributeType("TSK_JLAD_FILE_SIZE") attID_jl_ic = skCase.getAttributeType("TSK_JLAD_ICON_LOCATION") attID_jl_ltid = skCase.getAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA") attID_jl_lp = skCase.getAttributeType("TSK_JLAD_LOCAL_PATH") attID_jl_mi = skCase.getAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER") attID_jl_np = skCase.getAttributeType("TSK_JLAD_NETWORK_PATH") attID_jl_rp = skCase.getAttributeType("TSK_JLAD_RELATIVE_PATH") attID_jl_vl = skCase.getAttributeType("TSK_JLAD_VOLUME_LABEL") attID_jl_wd = skCase.getAttributeType("TSK_JLAD_WORKING_DIRECTORY") #self.log(Level.INFO, "Artifact id for TSK_PREFETCH ==> " + str(artID_pf)) # we don't know how much work there is yet progressBar.switchToIndeterminate() # Find the Windows Event Log Files files = [] fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms") numFiles 
= len(files) self.log(Level.INFO, "found " + str(numFiles) + " files") progressBar.switchToDeterminate(numFiles) fileCount = 0; # Create Event Log directory in temp directory, if it exists then continue on processing Temp_Dir = Case.getCurrentCase().getTempDirectory() temp_dir = os.path.join(Temp_Dir, "JL_AD") self.log(Level.INFO, "create Directory " + temp_dir) try: os.mkdir(temp_dir) except: self.log(Level.INFO, "JL_AD Directory already exists " + temp_dir) # Write out each Event Log file to the temp directory for file in files: # Check if the user pressed cancel while we were busy if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK #self.log(Level.INFO, "Processing file: " + file.getName()) fileCount += 1 # Save the DB locally in the temp folder. use file id as name to reduce collisions lclDbPath = os.path.join(temp_dir, file.getName()) ContentUtils.writeToFile(file, File(lclDbPath)) # Run the EXE, saving output to a sqlite database self.log(Level.INFO, "Running program on data source parm 1 ==> " + temp_dir + " Parm 2 ==> " + Temp_Dir + "\JL_AD.db3") output = subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "JL_AD.db3"), self.path_to_app_id_db], stdout=subprocess.PIPE).communicate()[0] #self.log(Level.INFO, "Output for the JL_AD program ==> " + output) self.log(Level.INFO, " Return code is ==> " + output) # Set the database to be read to the one created by the Event_EVTX program lclDbPath = os.path.join(Temp_Dir, "JL_AD.db3") self.log(Level.INFO, "Path to the JL_AD database file created ==> " + lclDbPath) # Open the DB using JDBC try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK fileManager = Case.getCurrentCase().getServices().getFileManager() files = 
fileManager.findFiles(dataSource, "%.automaticDestinations-ms") for file in files: file_name = os.path.splitext(file.getName())[0] self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====") # Query the contacts table in the database and get all columns. try: stmt = dbConn.createStatement() SQL_Statement = "select File_Name, File_Description, Item_Name, command_line_arguments, drive_type, drive_serial_number, " + \ " description, environment_variables_location, file_access_time, file_attribute_flags, file_creation_time, " + \ " file_modification_time, file_size, icon_location, link_target_identifier_data, local_path, " + \ " machine_identifier, network_path, relative_path, volume_label, working_directory " + \ " from Automatic_destinations_JL where upper(File_Name) = upper('" + file_name + "');" # " from Automatic_destinations_JL where File_Name||'.automaticDestinations-ms' = '" + file_name + "';" #self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====") resultSet = stmt.executeQuery(SQL_Statement) except SQLException as e: self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Cycle through each row and create artifacts while resultSet.next(): try: # self.log(Level.INFO, "Result (" + resultSet.getString("File_Name") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Recovered_Record") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Computer_Name") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Identifier") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Identifier_Qualifiers") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Level") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Offset") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Identifier") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Source_Name") + 
")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_User_Security_Identifier") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Time") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Time_Epoch") + ")") # self.log(Level.INFO, "Result (" + resultSet.getString("Event_Detail_Text") + ")") File_Name = resultSet.getString("File_Name") File_Description = resultSet.getString("File_Description") Item_Name = resultSet.getString("Item_Name") Command_Line_Arguments = resultSet.getString("command_line_arguments") Drive_Type = resultSet.getInt("drive_type") Drive_Serial_Number = resultSet.getInt("drive_serial_number") Description = resultSet.getString("description") Environment_Variables_Location = resultSet.getString("environment_variables_location") File_Access_Time = resultSet.getString("file_access_time") File_Attribute_Flags = resultSet.getInt("file_attribute_flags") File_Creation_Time = resultSet.getString("file_creation_time") File_Modification_Time = resultSet.getString("file_modification_time") File_Size = resultSet.getInt("file_size") Icon_Location = resultSet.getString("icon_location") Link_Target_Identifier_Data = resultSet.getString("link_target_identifier_data") Local_Path = resultSet.getString("local_path") Machine_Identifier = resultSet.getString("machine_identifier") Network_Path = resultSet.getString("network_path") Relative_Path = resultSet.getString("relative_path") Volume_Label = resultSet.getString("volume_label") Working_Directory = resultSet.getString("working_directory") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") #fileManager = Case.getCurrentCase().getServices().getFileManager() #files = fileManager.findFiles(dataSource, Prefetch_File_Name) #for file in files: # Make artifact for TSK_PREFETCH, this can happen when custom attributes are fully supported #art = file.newArtifact(artID_pf) # Make an artifact on the blackboard, 
TSK_PROG_RUN and give it attributes for each of the fields # Not the proper way to do it but it will work for the time being. art = file.newArtifact(artID_jl_ad) # This is for when proper atributes can be created. art.addAttributes(((BlackboardAttribute(attID_jl_fn, JumpListADDbIngestModuleFactory.moduleName, File_Name)), \ (BlackboardAttribute(attID_jl_fg, JumpListADDbIngestModuleFactory.moduleName, File_Description)), \ (BlackboardAttribute(attID_jl_in, JumpListADDbIngestModuleFactory.moduleName, Item_Name)), \ (BlackboardAttribute(attID_jl_cl, JumpListADDbIngestModuleFactory.moduleName, Command_Line_Arguments)), \ (BlackboardAttribute(attID_jl_dt, JumpListADDbIngestModuleFactory.moduleName, Drive_Type)), \ (BlackboardAttribute(attID_jl_dsn, JumpListADDbIngestModuleFactory.moduleName, Drive_Serial_Number)), \ (BlackboardAttribute(attID_jl_des, JumpListADDbIngestModuleFactory.moduleName, Description)), \ (BlackboardAttribute(attID_jl_evl, JumpListADDbIngestModuleFactory.moduleName, Environment_Variables_Location)), \ (BlackboardAttribute(attID_jl_fat, JumpListADDbIngestModuleFactory.moduleName, File_Access_Time)), \ (BlackboardAttribute(attID_jl_faf, JumpListADDbIngestModuleFactory.moduleName, File_Attribute_Flags)), \ (BlackboardAttribute(attID_jl_fct, JumpListADDbIngestModuleFactory.moduleName, File_Creation_Time)), \ (BlackboardAttribute(attID_jl_fmt, JumpListADDbIngestModuleFactory.moduleName, File_Modification_Time)), \ (BlackboardAttribute(attID_jl_fs, JumpListADDbIngestModuleFactory.moduleName, File_Size)), \ (BlackboardAttribute(attID_jl_ic, JumpListADDbIngestModuleFactory.moduleName, Icon_Location)), \ (BlackboardAttribute(attID_jl_ltid, JumpListADDbIngestModuleFactory.moduleName, Link_Target_Identifier_Data)), \ (BlackboardAttribute(attID_jl_lp, JumpListADDbIngestModuleFactory.moduleName, Local_Path)), \ (BlackboardAttribute(attID_jl_mi, JumpListADDbIngestModuleFactory.moduleName, Machine_Identifier)), \ (BlackboardAttribute(attID_jl_np, 
JumpListADDbIngestModuleFactory.moduleName, Network_Path)), \ (BlackboardAttribute(attID_jl_rp, JumpListADDbIngestModuleFactory.moduleName, Relative_Path)), \ (BlackboardAttribute(attID_jl_vl, JumpListADDbIngestModuleFactory.moduleName, Volume_Label)), \ (BlackboardAttribute(attID_jl_wd, JumpListADDbIngestModuleFactory.moduleName, Working_Directory)))) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None)) # Clean up skCase_Tran.commit() stmt.close() dbConn.close() try: os.remove(lclDbPath) except: self.log(Level.INFO, "Failed to remove the file " + lclDbPath) #skCase.close() #Clean up EventLog directory and files for file in files: try: os.remove(os.path.join(temp_dir, file.getName())) except: self.log(Level.INFO, "removal of JL_AD file failed " + os.path.join(temp_dir, file.getName())) try: os.rmdir(temp_dir) except: self.log(Level.INFO, "removal of JL_AD directory failed " + temp_dir) # After all databases, post a message to the ingest messages in box. message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "JumpList AD", " JumpList AD Has Been Analyzed " ) IngestServices.getInstance().postMessage(message) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None)) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest: extract each SYSTEM registry hive found under a
    "config" parent path, run the bundled shimcache parser executable
    (self.path_to_exe) over it into Shimcache_db.db3, then walk every table
    of that output SQLite database and post one TSK_SHIMCACHE blackboard
    artifact per row.

    dataSource  -- the Autopsy data source to search for SYSTEM hives
    progressBar -- progress bar object for this data-source ingest job
    Returns IngestModule.ProcessResult.OK on every path; errors are logged only.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Find files named SYSTEM whose parent path matches "config" (registry hives)
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "SYSTEM", "config")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Shimcache directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "Shimcache")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Shimcache Directory already exists " + Temp_Dir)

    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))
        self.log(Level.INFO, "Saved File ==> " + lclDbPath)

        # Run the EXE, saving output to a sqlite database
        #try:
        #    self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " -i " + Temp_Dir + "\\Shimcache\\" + \
        #             file.getName() + " -o " + Temp_Dir + "\\Shimcache_db.db3")
        #    pipe = Popen([self.path_to_exe, "-i " + Temp_Dir + "\\Shimcache\\" + file.getName(), "-o " + Temp_Dir + \
        #                 "\\Shimcache_db.db3"], stdout=PIPE, stderr=PIPE)
        self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "//Shimcache//" + \
                 file.getName() + " " + Temp_Dir + "//Shimcache_db.db3")
        # NOTE(review): every hive is parsed into the same Shimcache_db.db3 path;
        # whether the external exe appends or overwrites is not visible here -- confirm.
        pipe = Popen([self.path_to_exe, os.path.join(temp_dir, file.getName()), os.path.join(temp_dir, "Shimcache_db.db3")], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)
        #except:
        #    self.log(Level.INFO, "Error running program shimcache_parser.")

    # Open the parser's output DB using JDBC
    lclDbPath = os.path.join(temp_dir, "Shimcache_db.db3")
    self.log(Level.INFO, "Path the system database file created ==> " + lclDbPath)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # List every table present in the output database
    try:
        stmt = dbConn.createStatement()
        resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER; ")
        self.log(Level.INFO, "query SQLite Master table")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for system table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Register the custom TSK_SHIMCACHE artifact type; the bare except absorbs
    # the failure when the type already exists from an earlier run
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_shim = skCase.addArtifactType("TSK_SHIMCACHE", "Shimcache")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

    artID_shim = skCase.getArtifactTypeID("TSK_SHIMCACHE")
    artID_shim_evt = skCase.getArtifactType("TSK_SHIMCACHE")

    # Cycle through each table and create artifacts from its rows
    while resultSet.next():
        try:
            self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
            table_name = resultSet.getString("tbl_name")
            #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
            SQL_String_1 = "Select * from " + table_name + ";"
            SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
            #self.log(Level.INFO, SQL_String_1)
            #self.log(Level.INFO, SQL_String_2)
            Column_Names = []
            Column_Types = []
            # Discover the table's columns: TEXT columns become STRING attributes,
            # every other declared type is registered as a DATETIME attribute
            resultSet2 = stmt.executeQuery(SQL_String_2)
            while resultSet2.next():
                Column_Names.append(resultSet2.getString("name").upper())
                Column_Types.append(resultSet2.getString("type"))
                if resultSet2.getString("type").upper() == "TEXT":
                    try:
                        attID_ex1 = skCase.addArtifactAttributeType("TSK_SHIMCACHE_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                else:
                    try:
                        attID_ex1 = skCase.addArtifactAttributeType("TSK_SHIMCACHE_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, resultSet2.getString("name"))
                        #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

            # One artifact per row; attributes are matched to columns by position.
            # NOTE(review): artifacts attach to `file`, i.e. the last hive written
            # by the loop above -- confirm that is the intended parent.
            resultSet3 = stmt.executeQuery(SQL_String_1)
            while resultSet3.next():
                art = file.newArtifact(artID_shim)
                Column_Number = 1
                for col_name in Column_Names:
                    #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                    #self.log(Level.INFO, "Result get information for column " + Column_Types[Column_Number - 1] + " ")
                    #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                    c_name = "TSK_SHIMCACHE_" + col_name
                    #self.log(Level.INFO, "Attribute Name is " + c_name + " Atribute Type is " + str(Column_Types[Column_Number - 1]))
                    attID_ex1 = skCase.getAttributeType(c_name)
                    if Column_Types[Column_Number - 1] == "TEXT":
                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseShimcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                    else:
                        art.addAttribute(BlackboardAttribute(attID_ex1, ParseShimcacheIngestModuleFactory.moduleName, resultSet3.getInt(Column_Number)))
                    Column_Number = Column_Number + 1
        except SQLException as e:
            self.log(Level.INFO, "Error getting values from Shimcache table (" + e.getMessage() + ")")

    # Clean up
    stmt.close()
    dbConn.close()

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParseShimcacheIngestModuleFactory.moduleName, artID_shim_evt, None))

    # Clean up the temp directory: output db, extracted hives, then the directory
    try:
        os.remove(lclDbPath)
    except:
        self.log(Level.INFO, "removal of Shimcache tempdb failed " + lclDbPath)
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of Shimcache file failed " + Temp_Dir + "\\" + file.getName())
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Shimcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Shimcache Parser", " Shimcache Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def parse_sqlite_data(self, dataSource, progressBar, os_version, mac_os_art_id, settings_db):
    """Create blackboard artifacts for one SQLite-based MacOS artifact definition.

    Reads the artifact/attribute definitions for (os_version, mac_os_art_id)
    from the module's settings database, registers the custom artifact and
    attribute types with the case, extracts the matching SQLite database(s)
    from the data source into the case temp directory, runs the configured
    SQL statement against each and posts one artifact per result row.

    dataSource    -- data source being ingested
    progressBar   -- progress bar for this ingest job
    os_version    -- MacOS "major.minor" version string selecting definitions
    mac_os_art_id -- id of the artifact definition row to process
    settings_db   -- path to the Macos_recents settings SQLite database
    Returns IngestModule.ProcessResult.OK on error paths; on success the
    method falls off the end (implicit None).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) macos_recents.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Look up the single definition row for this os_version / artifact id.
    try:
        stmt = dbConn.createStatement()
        macos_version_sql = "select mac_osx_art_id, mac_osx_art_type, mac_osx_art_File_Name, mac_osx_art_dir_name, " + \
                            " mac_osx_art_database_name, mac_osx_art_sql_statement, os_version, " + \
                            " os_name from mac_artifact a, os_version b where a.os_id = b.os_id and b.os_version = '" + os_version + "'" + \
                            " and mac_osx_art_id = " + str(mac_os_art_id) + ";"
        self.log(Level.INFO, macos_version_sql)
        resultSet = stmt.executeQuery(macos_version_sql)
        self.log(Level.INFO, "query recent version table")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for recent version (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Get the artifact name and create it.
    # NOTE(review): resultSet columns are read below before resultSet.next() is
    # called; the JDBC contract positions the cursor before the first row, so
    # this relies on sqlite-jdbc leniency -- confirm against the driver in use.
    try:
        stmt_2 = dbConn.createStatement()
        artifact_sql = "select distinct autopsy_art_type, autopsy_art_name, autopsy_art_description " + \
                       " from autopsy_artifact a, Art_att_mac_xref b where a.autopsy_art_id = b.autopsy_art_id " + \
                       " and b.mac_osx_art_id = " + resultSet.getString("mac_osx_art_id") + ";"
        resultSet_art = stmt_2.executeQuery(artifact_sql)
        self.log(Level.INFO, "Artifact Type (" + resultSet_art.getString("autopsy_art_type") + ")")
        if resultSet_art.getString("autopsy_art_type") != 'AUTOPSY':
            try:
                self.log(Level.INFO, "Begin Create New Artifacts ==> " + resultSet_art.getString("autopsy_art_name"))
                # Fixed: column name was misspelled "autopsy_art_desctiption", which
                # raised on every call (the SELECT above spells it correctly), so the
                # custom artifact type was never created and artifact_name never set.
                artID_art = skCase.addArtifactType( resultSet_art.getString("autopsy_art_name"), \
                                                    resultSet_art.getString("autopsy_art_description"))
                self.artifact_name = resultSet_art.getString("autopsy_art_name")
            except:
                self.log(Level.INFO, "Artifacts Creation Error, artifact " + resultSet_art.getString("autopsy_art_name") + " exists. ==> ")
        else:
            self.artifact_name = resultSet_art.getString("autopsy_art_name")

        # Get the attribute types and create them
        stmt_3 = dbConn.createStatement()
        attribute_sql = "select distinct autopsy_attrib_type, autopsy_attrib_name, autopsy_attrib_desc, autopsy_attrib_value_type_desc " + \
                        " from autopsy_attribute a, Art_att_mac_xref b, autopsy_value_type c " + \
                        " where a.autopsy_attrib_id = b.autopsy_attrib_id and a.autopsy_attrib_value_type = c.autopsy_attrib_value_type " + \
                        " and b.mac_osx_art_id =" + resultSet.getString("mac_osx_art_id") + ";"
        self.log(Level.INFO, "Attribute SQL ==> " + attribute_sql)
        # Map the settings-db value-type description to the blackboard value type;
        # anything unrecognized falls back to DATETIME (same as the original chain).
        value_type_map = {'String': BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                          'Integer': BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER,
                          'Long': BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG,
                          'Double': BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE,
                          'Byte': BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE}
        resultSet_att = stmt_3.executeQuery(attribute_sql)
        while resultSet_att.next():
            if resultSet_att.getString("autopsy_attrib_type") == 'CUSTOM':
                attrib_value_type = value_type_map.get(resultSet_att.getString("autopsy_attrib_value_type_desc"),
                                                       BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME)
                try:
                    attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"),
                                                                    attrib_value_type,
                                                                    resultSet_att.getString("autopsy_attrib_desc"))
                except:
                    # Usually means the attribute type already exists
                    self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for artifacts/attributes (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Cycle through each definition row and create artifacts
    while resultSet.next():
        macos_file_name = resultSet.getString("mac_osx_art_File_Name")
        macos_dir_name = resultSet.getString("mac_osx_art_dir_name")
        macos_database_name = resultSet.getString("mac_osx_art_database_name")
        #macos_table_name = resultSet.getString("mac_osx_art_table_name")
        #self.path_to_plist_exe = os.path.join(os.path.dirname(os.path.abspath(__file__)), resultSet.getString("mac_osx_art_exec_file"))

        # Find the target database plus any -wal/-journal siblings (hence the "%")
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, macos_file_name + "%", macos_dir_name)
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;

        # do this since we want to get the wal or journal files associated with the
        # SQLite database but we want to make sure we have them to use
        all_files = []
        if numFiles > 1:
            for file in files:
                if file.getName() == macos_file_name:
                    self.log(Level.INFO, file.getParentPath())
                    all_files.append(file)
        # NOTE(review): when only one file matches, all_files stays empty and no
        # database is processed -- confirm whether the single-match case should
        # also be queued here.
        files_to_process = all_files

        # Create the working directory in temp; if it already exists keep going
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "\\macos_recent")
        except:
            self.log(Level.INFO, "macos_recent Directory already exists " + Temp_Dir)

        # Write out each matched file to the temp directory, prefixing the owning
        # database's object id so journal/wal files land next to their database.
        file_id = 0
        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            self.log(Level.INFO, "File Name ==> " + file.getName() + " <==> " + macos_database_name)
            if file.getName().upper() == macos_database_name.upper():
                file_id = file.getId()
                self.log(Level.INFO, "File Name ==> " + file.getName() + " <==> " + macos_database_name + " <++> " + str(file.getId()))
                lclDbPath = os.path.join(Temp_Dir + "\\macos_recent", str(file_id) + "-" + file.getName())
                self.log(Level.INFO, " Database name ==> " + lclDbPath)
                ContentUtils.writeToFile(file, File(lclDbPath))
            else:
                # Sibling (wal/journal) file: reuse the database's file_id prefix
                lclDbPath = os.path.join(Temp_Dir + "\\macos_recent", str(file_id) + "-" + file.getName())
                self.log(Level.INFO, " Database name ==> " + lclDbPath)
                ContentUtils.writeToFile(file, File(lclDbPath))

        lclDbPath = os.path.join(Temp_Dir + "\\macos_recent", str(file_id) + "-" + macos_database_name)
        lclFilePath = os.path.join(Temp_Dir + "\\macos_recent", macos_file_name)
        self.log(Level.INFO, " Database name ==> " + lclDbPath + " File Path ==> " + lclFilePath)

        for file in files_to_process:
            # Example has only a Windows EXE, so bail if we aren't on Windows
            if not PlatformUtil.isWindowsOS():
                self.log(Level.INFO, "Ignoring data source. Not running on Windows")
                return IngestModule.ProcessResult.OK

            # Open the extracted DB using JDBC (this rebinds dbConn; the settings
            # connection opened above is no longer reachable for closing)
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory() + "\\macos_recent", str(file.getId()) + "-" + macos_database_name)
            self.log(Level.INFO, "Path the Safari History.db database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Run the definition's SQL statement against the extracted database
            try:
                stmt_1 = dbConn.createStatement()
                macos_recent_sql = resultSet.getString("mac_osx_art_sql_statement")
                self.log(Level.INFO, macos_recent_sql)
                resultSet_3 = stmt_1.executeQuery(macos_recent_sql)
                self.log(Level.INFO, "query " + macos_database_name + " table")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for history table (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            artID_hst = skCase.getArtifactTypeID(self.artifact_name)
            artID_hst_evt = skCase.getArtifactType(self.artifact_name)

            # The result set's column labels double as blackboard attribute names
            meta = resultSet_3.getMetaData()
            columncount = meta.getColumnCount()
            column_names = []
            self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
            for x in range (1, columncount + 1):
                self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                column_names.append(meta.getColumnLabel(x))
            self.log(Level.INFO, "All Columns ==> " + str(column_names))

            # Cycle through each row and create artifacts
            while resultSet_3.next():
                try:
                    #self.log(Level.INFO, SQL_String_1)
                    self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                    art = file.newArtifact(artID_hst)
                    self.log(Level.INFO, "Inserting attribute URL")
                    for col_name in column_names:
                        attID_ex1 = skCase.getAttributeType(col_name)
                        self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                        self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                        if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                        else:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getReal(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from web_history table (" + e.getMessage() + ")")

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseMacOS_RecentIngestModuleFactory.moduleName, artID_hst_evt, None))

        # NOTE(review): stmt_1/artID_hst_evt are only bound when files_to_process
        # was non-empty; these closes raise NameError otherwise -- confirm.
        stmt_3.close()
        stmt_2.close()
        stmt_1.close()
        stmt.close()
        dbConn.close()

        # Clean up the extracted database and working directory
        os.remove(lclDbPath)
        for file in files:
            try:
                os.remove(Temp_Dir + "\\macos_recent\\" + file.getName())
            except:
                self.log(Level.INFO, "removal of Safari History file failed " + Temp_Dir + "\\macos_recent" + file.getName())
        try:
            os.rmdir(Temp_Dir + "\\macos_recent")
        except:
            self.log(Level.INFO, "removal of Safari History directory failed " + Temp_Dir)
def process(self, dataSource, progressBar):
    """Data-source ingest driver for the MacOS recents module.

    Determines the MacOS version via parse_plist_data, trims it to
    "major.minor", then walks every artifact definition in the bundled
    Macos_recents.db3 settings database and dispatches each one to
    parse_plist_data or parse_sqlite_data.

    dataSource  -- the data source being ingested
    progressBar -- progress bar for this data-source ingest job
    Returns IngestModule.ProcessResult.OK (errors are logged, not raised).
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()
    self.log(Level.INFO, "Starting 2 to process, Just before call to parse_safari_history")

    # Settings database ships next to this module file.
    # (os.path.join instead of "\\" concatenation so non-Windows hosts work too.)
    head, tail = os.path.split(os.path.abspath(__file__))
    settings_db = os.path.join(head, "Macos_recents.db3")

    # Run this first to get the version of the OS to pass to the rest of the program
    self.parse_plist_data(dataSource, progressBar, 'All', 1, settings_db)
    self.log(Level.INFO, "MacOS Version is ===> " + self.os_version + " < == ")

    # Get rid of the minor revision number: "10.12.6" -> "10.12"
    if self.os_version.count('.') > 1:
        self.os_version = '.'.join(self.os_version.split('.')[:2])

    # Start to process based on version of OS
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) macos_recents.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the artifact table in the database and get all columns.
    try:
        stmt = dbConn.createStatement()
        # NOTE(review): the version is hardcoded to '10.12' even though
        # self.os_version was just computed above -- looks like leftover test
        # code; confirm whether this should use self.os_version.
        process_data_sql = "select mac_osx_art_id, mac_osx_art_type, os_version from mac_artifact a, os_version b " + \
                           " where a.os_id = b.os_id and b.os_version = '10.12' and mac_osx_art_id > 1;"
        self.log(Level.INFO, process_data_sql)
        resultSet = stmt.executeQuery(process_data_sql)
        self.log(Level.INFO, "query mac_artifact table")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for mac_artifact (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Process all the artifacts based on version of the OS
    while resultSet.next():
        if resultSet.getString("mac_osx_art_type") == "Plist":
            self.parse_plist_data(dataSource, progressBar, resultSet.getString("os_version"), resultSet.getString("mac_osx_art_id"), \
                                  settings_db)
        else:
            self.parse_sqlite_data(dataSource, progressBar, resultSet.getString("os_version"), resultSet.getString("mac_osx_art_id"), \
                                   settings_db)

    self.log(Level.INFO, "MacOS Version is ===> " + self.os_version + " < == ")
    self.log(Level.INFO, "ending process, Just before call to parse_safari_history")

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Mac OS Recent Artifacts",
                                          " Mac OS Recents Artifacts Have Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest: find every WebcacheV01.dat (IE/Edge web cache ESE
    database), extract it, convert it to SQLite with the bundled executable
    (self.path_to_exe), then create one custom TSK_WC_<container> artifact
    type per container and one artifact per row of that container.

    dataSource  -- the Autopsy data source to search
    progressBar -- progress bar for this data-source ingest job
    Returns IngestModule.ProcessResult.OK on every path; errors are logged only.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Locate every WebcacheV01.dat in the data source
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "WebcacheV01.dat")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Webcache directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "Webcache")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Webcache Directory already exists " + temp_dir)

    # Write out each Webcache file to the temp directory and convert it to SQLite
    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()))
        DbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, file.getName() + ' ==> ' + str(file.getId()) + ' ==> ' + file.getUniquePath())
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE, saving output to a sqlite database
        self.log(Level.INFO, "Running program on data source parm 1 ==> " + temp_dir + " Parm 2 ==> " + DbPath)
        #subprocess.Popen([self.path_to_exe, lclDbPath, DbPath]).communicate()[0]
        pipe = Popen([self.path_to_exe, lclDbPath, DbPath], stdout=PIPE, stderr=PIPE, cwd=os.path.dirname(os.path.abspath(__file__)))
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Second pass: read back each converted SQLite database and post artifacts
    for file in files:
        # Open the DB using JDBC
        lclDbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, "Path the Webcache database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            # NOTE(review): returning here abandons all remaining Webcache files
            # and skips the temp-dir cleanup below -- confirm intended.
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        #PSlsit => TSK_PROG_RUN
        #
        # Get the distinct container names present in the converted database
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select distinct container_name from all_containers;")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        Container_List = []
        while resultSet.next():
            Container_List.append(resultSet.getString("container_name"))
        #self.log(Level.INFO, "Number of containers ==> " + str(len(Container_List)) + " ==> " + str(Container_List))

        # Cycle through each container and create artifacts from its rows
        for c_name in Container_List:
            try:
                container_name = c_name
                #self.log(Level.INFO, "Result (" + container_name + ")")
                #self.log(Level.INFO, "Result get information from table " + container_name + " ")
                SQL_String_1 = "Select * from all_containers where container_name = '" + container_name + "';"
                SQL_String_2 = "PRAGMA table_info('All_Containers')"
                #self.log(Level.INFO, SQL_String_1)
                #self.log(Level.INFO, SQL_String_2)
                artifact_name = "TSK_WC_" + container_name.upper()
                artifact_desc = "WebcacheV01 " + container_name.upper()
                #self.log(Level.INFO, "Artifact Name ==> " + artifact_name + " Artifact Desc ==> " + artifact_desc)
                # Register the per-container artifact type; the bare except absorbs
                # the failure when it already exists from an earlier run
                try:
                    self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_web = skCase.addArtifactType( artifact_name, artifact_desc)
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

                artID_web = skCase.getArtifactTypeID(artifact_name)
                artID_web_evt = skCase.getArtifactType(artifact_name)

                # Discover columns of All_Containers: TEXT and untyped columns
                # become STRING attributes, everything else becomes LONG
                Column_Names = []
                Column_Types = []
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type").upper())
                    #attID_ex1 = skCase.addAttrType("TSK_" + resultSet2.getString("name").upper(), resultSet2.getString("name"))
                    #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                    #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + resultSet2.getString("type").upper())
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    elif resultSet2.getString("type").upper() == "":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One artifact per row; attributes are matched to columns by position
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_web)
                    Column_Number = 1
                    for col_name in Column_Names:
                        #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ==> " + Column_Types[Column_Number - 1])
                        #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                        c_name = "TSK_" + col_name
                        #self.log(Level.INFO, "Attribute Name is " + c_name + " ")
                        # NOTE(review): the getAttrTypeID result is immediately
                        # overwritten by getAttributeType -- looks like a dead call.
                        attID_ex1 = skCase.getAttrTypeID(c_name)
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        elif Column_Types[Column_Number - 1] == "":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        # elif Column_Types[Column_Number - 1] == "BLOB":
                        #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, "BLOBS Not Supported"))
                        # elif Column_Types[Column_Number - 1] == "REAL":
                        #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, resultSet3.getFloat(Column_Number)))
                        else:
                            #self.log(Level.INFO, "Value for column type ==> " + str(resultSet3.getInt(Column_Number)) + " <== ")
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                        Column_Number = Column_Number + 1

                # Fire an event to notify the UI and others that there are new artifacts
                IngestServices.getInstance().fireModuleDataEvent(
                       ModuleDataEvent(ParseWebcacheIngestModuleFactory.moduleName, artID_web_evt, None))
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

    # Clean up
    # NOTE(review): the statement/connection closes are commented out, so each
    # per-file SQLite connection is left open -- confirm whether intentional.
    #stmt.close()
    #dbConn.close()
    #os.remove(lclDbPath)

    # Clean up the temp directory: extracted caches, converted db3s, then the directory
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName() + "-" + str(file.getId())))
            os.remove(os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3"))
        except:
            self.log(Level.INFO, "removal of Webcache file failed " + temp_dir + "\\" + file.getName() + "-" + str(file.getId()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Webcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Webcache Parser", " Webcache Has Been Parsed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run Volatility plugins against every memory image in the data source
    and load each plugin's SQLite output table as Autopsy artifacts.

    For each file with a local path, each configured plugin is executed with
    --output=sqlite; every new table in the resulting database becomes a
    custom artifact type (one attribute per column), and each row becomes an
    artifact on the file.  Processed tables are recorded in the
    plugins_loaded_to_Autopsy bookkeeping table so reruns skip them.

    Returns IngestModule.ProcessResult.OK.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the temp directory and create the sub directory
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    temp_dir = os.path.join(Temp_Dir, "Volatility")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Plaso Import Directory already exists " + Temp_Dir)

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%", "/")
    numFiles = len(files)
    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

    for file in files:
        self.log(Level.INFO, "File name to process is ==> " + str(file))
        self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath()))
        image_file = file.getLocalAbsPath()
        # Only files that exist on local disk can be fed to Volatility.
        if image_file != None:
            self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath()))
            file_name = os.path.basename(file.getLocalAbsPath())
            self.log(Level.INFO, "File Name ==> " + file_name)
            base_file_name = os.path.splitext(file_name)[0]
            # One SQLite output database per image, named after the image.
            self.database_file = os.path.join(temp_dir, base_file_name + ".db3")
            self.log(Level.INFO, "File Name ==> " + self.database_file)
            if self.isAutodetect:
                self.find_profile(image_file)
            # No usable profile -> nothing Volatility can do with this image.
            if self.Profile == None:
                continue
            for plugin_to_run in self.Plugins:
                if self.Python_Program:
                    # Volatility is a .py script: invoke through the interpreter.
                    self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + file.getLocalAbsPath() + " " + \
                             "--profile=" + self.Profile + " --output=sqlite --output-file=" + self.database_file + " " + \
                             self.Additional_Parms + " " + plugin_to_run)
                    if PlatformUtil.isWindowsOS():
                        pipe = Popen(["Python.exe", self.Volatility_Executable, "-f", file.getLocalAbsPath(), "--profile=" + self.Profile,
                                      "--output=sqlite", "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run],
                                     stdout=PIPE, stderr=PIPE)
                    else:
                        pipe = Popen(["python", self.Volatility_Executable, "-f", file.getLocalAbsPath(), "--profile=" + self.Profile,
                                      "--output=sqlite", "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run],
                                     stdout=PIPE, stderr=PIPE)
                else:
                    # Standalone Volatility executable.
                    self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + file.getLocalAbsPath() + " " + \
                             "--profile=" + self.Profile + " --output=sqlite --output-file=" + self.database_file + " " + \
                             self.Additional_Parms + " " + plugin_to_run)
                    pipe = Popen([self.Volatility_Executable, "-f", file.getLocalAbsPath(), "--profile=" + self.Profile,
                                  "--output=sqlite", "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run],
                                 stdout=PIPE, stderr=PIPE)

                out_text = pipe.communicate()[0]
                self.log(Level.INFO, "Output from run is ==> " + out_text)

                # Open the plugin's output DB using JDBC.
                self.log(Level.INFO, "Path the volatility database file created ==> " + self.database_file)
                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % self.database_file)
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " + self.database_file + " (" + e.getMessage() + ")")

                # Bookkeeping table; creation fails harmlessly if it already exists.
                try:
                    exestmt = dbConn.createStatement()
                    resultx = exestmt.execute('create table plugins_loaded_to_Autopsy (table_name text);')
                except SQLException as e:
                    self.log(Level.INFO, "Could not create table plugins_loaded_to_autopsy")

                # Query the database for any table not yet loaded into Autopsy.
                try:
                    stmt = dbConn.createStatement()
                    stmt2 = dbConn.createStatement()
                    stmt3 = dbConn.createStatement()
                    stmt4 = dbConn.createStatement()
                    resultSet1 = stmt.executeQuery("Select upper(tbl_name) tbl_name from SQLITE_MASTER where upper(tbl_name) " \
                                                   " not in (select table_name from plugins_loaded_to_Autopsy)" \
                                                   " and upper(tbl_name) <> 'PLUGINS_LOADED_TO_AUTOPSY';")

                    # Cycle through each new table and create artifacts from it.
                    while resultSet1.next():
                        try:
                            self.log(Level.INFO, "Begin Create New Artifacts ==> " + resultSet1.getString("tbl_name"))
                            artID_art = skCase.addArtifactType("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name,
                                                               "Volatility " + resultSet1.getString("tbl_name") + " " + file_name)
                        except:
                            # Artifact type probably exists already from a prior run.
                            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

                        artID_art = skCase.getArtifactTypeID("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name)
                        artID_art_evt = skCase.getArtifactType("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name)

                        try:
                            self.log(Level.INFO, "Result (" + resultSet1.getString("tbl_name") + ")")
                            table_name = resultSet1.getString("tbl_name")
                            resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet1.getString("tbl_name") + " ")
                            row_count = resultSet4.getInt("NumRows")
                            self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ")
                            if row_count >= 1:
                                SQL_String_1 = "Select * from " + table_name + ";"
                                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                                self.log(Level.INFO, SQL_String_1)
                                self.log(Level.INFO, SQL_String_2)
                                artifact_name = "TSK_VOL_" + table_name.upper() + "_" + file_name
                                artID_sql = skCase.getArtifactTypeID(artifact_name)
                                artID_sql_evt = skCase.getArtifactType(artifact_name)
                                Column_Names = []
                                Column_Types = []

                                # One attribute type per column; declared type picks STRING vs LONG.
                                resultSet2 = stmt2.executeQuery(SQL_String_2)
                                while resultSet2.next():
                                    Column_Names.append(resultSet2.getString("name").upper())
                                    Column_Types.append(resultSet2.getString("type").upper())
                                    attribute_name = "TSK_VOL_" + table_name + "_" + resultSet2.getString("name").upper()
                                    if resultSet2.getString("type").upper() == "TEXT":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "LONGVARCHAR":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "BLOB":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "REAL":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    else:
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")

                                # One artifact per row; attribute value conversion mirrors the column type.
                                resultSet3 = stmt3.executeQuery(SQL_String_1)
                                while resultSet3.next():
                                    art = file.newArtifact(artID_sql)
                                    Column_Number = 1
                                    for col_name in Column_Names:
                                        c_name = "TSK_VOL_" + table_name.upper() + "_" + Column_Names[Column_Number - 1]
                                        attID_ex1 = skCase.getAttributeType(c_name)
                                        if Column_Types[Column_Number - 1] == "TEXT":
                                            if resultSet3.getString(Column_Number) == None:
                                                art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, " "))
                                            else:
                                                art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                        elif Column_Types[Column_Number - 1] == "":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                        elif Column_Types[Column_Number - 1] == "LONGVARCHAR":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                        elif Column_Types[Column_Number - 1] == "BLOB":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                        elif Column_Types[Column_Number - 1] == "REAL":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number))))
                                        else:
                                            art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, long(resultSet3.getString(Column_Number))))
                                        Column_Number = Column_Number + 1

                                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(VolatilityIngestModuleFactory.moduleName, \
                                                                                                 artID_sql_evt, None))
                        except SQLException as e:
                            # BUG FIX: was `resultSet.getString(...)` — `resultSet` is never
                            # defined in this method, so the handler itself raised NameError.
                            self.log(Level.INFO, "Error getting values from table " + resultSet1.getString("tbl_name") + " (" + e.getMessage() + ")")

                        # Mark the table as loaded so a rerun skips it.
                        try:
                            resultx = exestmt.execute("insert into plugins_loaded_to_Autopsy values ('" + table_name + "');")
                        except SQLException as e:
                            self.log(Level.INFO, "Could not create table plugins_loaded_to_autopsy")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")")

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "VolatilitySettings",
                                          " VolatilitySettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract .fseventsd files from the data source, parse them with the
    external MacFSEvents program into a SQLite database, and post the parsed
    records to the blackboard as artifacts.

    Artifact/attribute definitions are driven by the bundled fsevents_sql.db3
    settings database. Returns IngestModule.ProcessResult.OK (also on early
    exit when the settings or results database cannot be opened).
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the temp directory and create the sub directory for extracted files.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "MacFSEvents")
    try:
        os.mkdir(temp_dir)
    except:
        # Directory left over from a previous run — safe to reuse.
        self.log(Level.INFO, "FSEvents Directory already exists " + temp_dir)

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # Everything under any .fseventsd directory in the data source.
    files = fileManager.findFiles(dataSource, "%", ".fseventsd")
    numFiles = len(files)

    # Pass 1: copy each fseventsd record file out of the image into temp_dir.
    for file in files:
        # Skip directory placeholders and the uuid marker file.
        if (file.getName() == "..") or (file.getName() == '.') or (file.getName() == 'fseventsd-uuid'):
            pass
        else:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Save the file locally in the temp folder.
            # NOTE(review): despite the original "use file id" comment, only the
            # bare name is used here, so same-named files could collide — confirm.
            filePath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(filePath))

    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))
    self.log(Level.INFO, "Running program ==> " + self.MacFSEvents_Executable + " -c Autopsy " + "-o " + temp_dir + \
             " -s " + Temp_Dir + "\MacFSEvents")
    # Run the parser once over the whole extracted directory; it writes
    # Autopsy_FSEvents-Parsed_Records_DB.sqlite into temp_dir.
    pipe = Popen([self.MacFSEvents_Executable, "-c", "Autopsy", "-o", temp_dir, "-s", temp_dir], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    database_file = os.path.join(temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite")

    # Open the settings database (shipped next to this script) that maps
    # SQL statements to artifact names/titles.
    try:
        head, tail = os.path.split(os.path.abspath(__file__))
        settings_db = os.path.join(head, "fsevents_sql.db3")
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn1 = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Register every artifact type listed in the settings database.
    try:
        stmt1 = dbConn1.createStatement()
        sql_statement1 = "select distinct artifact_name, artifact_title from extracted_content_sql;"
        resultSet1 = stmt1.executeQuery(sql_statement1)
        while resultSet1.next():
            try:
                self.log(Level.INFO, "Begin Create New Artifacts")
                artID_fse = skCase.addArtifactType( resultSet1.getString("artifact_name"), resultSet1.getString("artifact_title"))
            except:
                # Type already exists from a previous run — not fatal.
                self.log(Level.INFO, "Artifacts Creation Error, " + resultSet1.getString("artifact_name") + " some artifacts may not exist now. ==> ")
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        #return IngestModule.ProcessResult.OK

    # Create the attribute types; if one exists then catch the error and continue.
    try:
        attID_fse_fn = skCase.addArtifactAttributeType("TSK_FSEVENTS_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fse_msk = skCase.addArtifactAttributeType("TSK_FSEVENTS_FILE_MASK", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Mask")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_src = skCase.addArtifactAttributeType("TSK_FSEVENTS_SOURCE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source File")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_dte = skCase.addArtifactAttributeType("TSK_FSEVENTS_DATES", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Date(s)")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")

    # Open the parsed-records database produced by the external program.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % os.path.join(temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite"))
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    #artID_fse = skCase.getArtifactTypeID("TSK_MACOS_FSEVENTS")
    #artID_fse_evt = skCase.getArtifactType("TSK_MACOS_FSEVENTS")
    artID_fse = skCase.getArtifactTypeID("TSK_MACOS_ALL_FSEVENTS")
    artID_fse_evt = skCase.getArtifactType("TSK_MACOS_ALL_FSEVENTS")
    attID_fse_fn = skCase.getAttributeType("TSK_FSEVENTS_FILE_NAME")
    attID_fse_msk = skCase.getAttributeType("TSK_FSEVENTS_FILE_MASK")
    attID_fse_src = skCase.getAttributeType("TSK_FSEVENTS_SOURCE")
    attID_fse_dte = skCase.getAttributeType("TSK_FSEVENTS_DATES")

    # Pass 2: for each source file, run every configured SQL statement
    # (restricted to rows from that file) and turn each row into an artifact.
    for file in files:
        if ('slack' in file.getName()):
            pass
        elif (file.getName() == '..') or (file.getName() == '.'):
            pass
        else:
            stmt1 = dbConn1.createStatement()
            sql_statement1 = "select sql_statement, artifact_name, artifact_title from extracted_content_sql;"
            resultSet1 = stmt1.executeQuery(sql_statement1)
            while resultSet1.next():
                try:
                    artID_fse = skCase.getArtifactTypeID(resultSet1.getString("artifact_name"))
                    artID_fse_evt = skCase.getArtifactType(resultSet1.getString("artifact_name"))
                    try:
                        stmt = dbConn.createStatement()
                        # Filter the configured statement to this source file's rows.
                        sql_statement = resultSet1.getString("sql_statement") + " and source like '%" + file.getName() + "';"
                        resultSet = stmt.executeQuery(sql_statement)
                        # Cycle through each row and create an artifact with its attributes.
                        while resultSet.next():
                            art = file.newArtifact(artID_fse)
                            art.addAttributes(((BlackboardAttribute(attID_fse_fn, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("filename"))), \
                                               (BlackboardAttribute(attID_fse_msk, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("mask"))), \
                                               (BlackboardAttribute(attID_fse_src, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("source"))), \
                                               (BlackboardAttribute(attID_fse_dte, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("OTHER_DATES")))))
                    except SQLException as e:
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
            try:
                stmt.close()
            except:
                self.log(Level.INFO, "Error closing statement for " + file.getName())

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(MacFSEventsIngestModuleFactory.moduleName, artID_fse_evt, None))

    # Close both databases and remove the working directory.
    try:
        stmt.close()
        dbConn.close()
        stmt1.close()
        dbConn1.close()
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of MacFSEvents imageinfo database failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "MacFSEventsSettings",
                                          " MacFSEventsSettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Export case artifacts and file timestamps to a JSONL file and submit
    it to a Timesketch server via an external uploader executable.

    Three phases: (1) dump every artifact that has a datetime attribute
    (value_type = 5) as one JSON line each; (2) dump one JSON line per
    timestamp column (ctime/crtime/atime/mtime) per file in tsk_files,
    flushed in batches; (3) run the uploader up to 3 times, decoding its
    TSxxx status codes into the final inbox message.

    Returns IngestModule.ProcessResult.OK.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # Setup SQL Statements and other variables.  Each query takes an
    # artifact_id appended to the end.
    # artifactSQL: artifact type/display name and data source for one artifact.
    artifactSQL = 'select distinct "artifact_type_name:" a1, art_type.type_name a2, "artifact_display_name:" b1, art_type.display_name b2, ' + \
                  ' "datasource_obj_id:" c1, img_name.obj_id c2, "datasource_name:" d1, img_name.name d2, art_type.type_name e1 from blackboard_artifact_types art_type, ' + \
                  ' tsk_image_names img_name, blackboard_artifacts art, blackboard_attributes att where img_name.obj_id = art.data_source_obj_id ' + \
                  ' and img_name.sequence = 0 and art.artifact_type_id = art_type.artifact_type_id and att.artifact_id = art.artifact_id ' + \
                  ' and att.artifact_id = '
    # artifactSQL2: the datetime attributes (value_type = 5) of one artifact.
    artifactSQL2 = 'select att_type.display_name date_type, case att.value_type when 0 then value_text when 1 then value_int32 when 2 then value_int64 ' +\
                   ' when 3 then value_double when 4 then value_byte when 5 then value_int64 end date_value from blackboard_attributes att, ' + \
                   ' blackboard_attribute_types att_type where att_type.attribute_type_id = att.attribute_type_id and att.value_type = 5 ' + \
                   ' and att.artifact_id = '
    # artifactSQL3: all non-datetime attributes of one artifact.
    artifactSQL3 = 'select att_type.display_name name, case att.value_type when 0 then value_text when 1 then value_int32 when 2 then value_int64 ' +\
                   ' when 3 then value_double when 4 then value_byte when 5 then value_int64 end value from blackboard_attributes att, ' + \
                   ' blackboard_attribute_types att_type where att_type.attribute_type_id = att.attribute_type_id and att.value_type <> 5 ' + \
                   ' and att.artifact_id = '
    sketchName = self.sketchName
    sketchDescription = self.sketchDescription
    timelineName = sketchName + "_Timeline"
    timelineIndex = sketchName + "_Index"
    jsonFileName = "Autopsy.jsonl"
    skCase = Case.getCurrentCase().getSleuthkitCase()

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Create the Timesketch working directory in temp; reuse it if present.
    tempDirectory = Case.getCurrentCase().getTempDirectory()
    tempDir = os.path.join(tempDirectory, "Timesketch")
    self.log(Level.INFO, "create Directory " + tempDir)
    try:
        os.mkdir(tempDir)
    except:
        self.log(Level.INFO, "Timesketch directory already exists" + tempDir)

    # Phase 1: one dict (-> JSON line) per artifact that has a datetime attribute.
    artList = []
    dbquery = skCase.executeQuery("select distinct artifact_id from blackboard_attributes where value_type = 5;")
    resultSet = dbquery.getResultSet()
    while resultSet.next():
        artifactDict = {}
        dbquery2 = skCase.executeQuery(artifactSQL + resultSet.getString("artifact_id"))
        resultSet2 = dbquery2.getResultSet()
        while resultSet2.next():
            # Keys come from the literal "xxx:" columns selected in artifactSQL.
            artifactDict[resultSet2.getString("a1")] = resultSet2.getString("a2")
            artifactDict[resultSet2.getString("b1")] = resultSet2.getString("b2")
            artifactDict[resultSet2.getString("c1")] = resultSet2.getString("c2")
            artifactDict[resultSet2.getString("d1")] = resultSet2.getString("d2")
        dbquery3 = skCase.executeQuery(artifactSQL2 + resultSet.getString("artifact_id"))
        resultSet3 = dbquery3.getResultSet()
        while resultSet3.next():
            # Timesketch requires message/timestamp/timestamp_desc fields.
            artifactDict[resultSet3.getString("date_type")] = resultSet3.getString("date_value")
            artifactDict["message"] = resultSet2.getString("e1")
            artifactDict["timestamp_desc"] = resultSet3.getString("date_type")
            artifactDict["timestamp"] = resultSet3.getString("date_value")
        dbquery4 = skCase.executeQuery(artifactSQL3 + resultSet.getString("artifact_id"))
        resultSet4 = dbquery4.getResultSet()
        while resultSet4.next():
            artValue = resultSet4.getString("value")
            artName = resultSet4.getString("name")
            if isinstance(artValue, unicode):
                # Strip em-dashes that upset the JSON/Timesketch pipeline.
                artifactDict[artName] = artValue.translate({0x2014: None})
            else:
                artifactDict[artName] = artValue
        dbquery4.close()
        dbquery3.close()
        dbquery2.close()
        artList.append(artifactDict)
    dbquery.close()

    jsonFileNamePath = os.path.join(tempDir, jsonFileName)
    with open(jsonFileNamePath, 'a') as f:
        for art in artList:
            json.dump(art, f)
            f.write("\n")

    # Phase 2: file timestamps — one JSON record per time column per file.
    fileList = []
    numFiles = 0
    dbquery = skCase.executeQuery("Select obj_id from tsk_files")
    resultSet = dbquery.getResultSet()
    while resultSet.next():
        dbquery2 = skCase.executeQuery("Select ctime, crtime, atime, mtime, parent_path||name from tsk_files where obj_id = " + \
                                       resultSet.getString("obj_id"))
        resultSet2 = dbquery2.getResultSet()
        meta = resultSet2.getMetaData()
        columnCount = meta.getColumnCount()
        while resultSet2.next():
            # Columns 1..4 are the timestamps; column 5 is the full path,
            # used only in the message text — hence range stops before it.
            for i in range(1, int(columnCount)):
                fileDict = {}
                if resultSet2.getString(i) is None:
                    fileDict[meta.getColumnLabel(i)] = ""
                    fileDict["message"] = "TSK : "
                    fileDict["timestamp"] = 0
                else:
                    fileDict[meta.getColumnLabel(i)] = resultSet2.getString(i)
                    fileDict["message"] = "TSK : " + resultSet2.getString(5)
                    fileDict["timestamp"] = resultSet2.getString(i)
                fileDict["timestamp_desc"] = meta.getColumnLabel(i)
                # Attach all non-time metadata columns of the file to the record.
                dbquery3 = skCase.executeQuery("Select * from tsk_files where obj_id = " + resultSet.getString("obj_id"))
                resultSet3 = dbquery3.getResultSet()
                meta3 = resultSet3.getMetaData()
                columnCount3 = meta3.getColumnCount()
                while resultSet3.next():
                    for x in range(1, int(columnCount3)):
                        colHead = meta3.getColumnLabel(x)
                        if (('ctime' in colHead) or ('crtime' in colHead) or ('atime' in colHead) or ('mtime' in colHead)):
                            pass
                        else:
                            if resultSet3.getString(x) is None:
                                fileDict[colHead] = ""
                            else:
                                fileDict[colHead] = resultSet3.getString(x)
                dbquery3.close()
                fileList.append(fileDict)
                numFiles = numFiles + 1
                # Flush in batches to bound memory on large cases.
                if numFiles > 1000:
                    with open(jsonFileNamePath, 'a') as f:
                        for file in fileList:
                            json.dump(file, f)
                            f.write("\n")
                    numFiles = 0
                    fileList = []
        dbquery2.close()
    dbquery.close()

    # BUG FIX: the tail of fileList (up to 1000 records) was never written
    # when the total was not a multiple of the batch size — flush it now.
    if fileList:
        with open(jsonFileNamePath, 'a') as f:
            for file in fileList:
                json.dump(file, f)
                f.write("\n")

    # Phase 3: submit to Timesketch.  Uploader status codes:
    # TS001 - Invalid arguments      TS002 - Sketch Created
    # TS003 - Sketch Already Exists  TS004 - Error Looking up Sketch
    # TS005 - Timeline Added         TS006 - Timeline Not Created
    # Try up to 3 times: if the sketch was created but the timeline failed,
    # a later attempt may still attach the timeline.
    emessage = "Internal Error contact plugin maker"
    for z in range(3):
        self.log(Level.INFO, "command ==> " + self.path_to_Timesketch_exe + " " + sketchName + " " + jsonFileNamePath + " " + \
                 self.IP_Address + " " + self.Port_Number + " " + self.userName + " " + self.password)
        pipe = Popen([self.path_to_Timesketch_exe, sketchName, jsonFileNamePath, self.IP_Address, self.Port_Number,
                      self.userName, self.password], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)
        if "TS005" in out_text:
            if "TS002" in out_text:
                emessage = "Sketch added, Timeline added"
                break
            elif "TS003" in out_text:
                emessage = "Sketch already exists, Timeline added"
                break
            elif "TS001" in out_text:
                emessage = "invalid parameters passed in, missing parameters"
                break
        elif "TS006" in out_text:
            if "TSK004" in out_text:
                emessage = "Error Looking up sketch, Timeline Not Created"

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Timesketch File Submit", emessage)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract every thumbcache_*.db file, run the external thumbcache
    viewer to dump its images, and register each dumped image as a derived
    file under the original thumbcache file.

    Returns IngestModule.ProcessResult.OK.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    thumb_files = fileManager.findFiles(dataSource, "thumbcache_%.db", "")
    numFiles = len(thumb_files)
    self.log(Level.INFO, "Number of Thumbs.db files found ==> " + str(numFiles))

    # Output goes under ModuleOutput so the derived files persist with the case;
    # the raw extracted databases go to the (disposable) temp directory.
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    tmp_dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir + "\Thumbcache")
    except:
        self.log(Level.INFO, "Thumbcache directory already exists " + Temp_Dir)

    for thumb_file in thumb_files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + thumb_file.getName())
        #fileCount += 1

        # Per-database output directory, keyed by "<fileId>-<name>" to avoid
        # collisions between same-named thumbcache files.
        out_dir = os.path.join(Temp_Dir + "\Thumbcache", str(thumb_file.getId()) + "-" + thumb_file.getName())
        try:
            os.mkdir(Temp_Dir + "\Thumbcache\\" + str(thumb_file.getId()) + "-" + thumb_file.getName())
        except:
            self.log(Level.INFO, str(thumb_file.getId()) + "-" + thumb_file.getName() + " Directory already exists " + Temp_Dir)

        # Save the thumbcache DB locally in the temp folder; use file id in
        # the name to reduce collisions.
        lclDbPath = os.path.join(tmp_dir, str(thumb_file.getId()) + "-" + thumb_file.getName())
        ContentUtils.writeToFile(thumb_file, File(lclDbPath))

        # Run the thumbcache viewer against the extracted database; it dumps
        # the contained images into out_dir.
        self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_thumbs + " -O " + out_dir + " " + lclDbPath)
        pipe = Popen([self.path_to_exe_thumbs, "-O", out_dir, lclDbPath], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Get the parent abstract file Information (the derived files hang off it).
        abstract_file_info = skCase.getAbstractFileById(thumb_file.getId())
        #self.log(Level.INFO, "Abstract File Info ==> " + str(abstract_file_info))

        # Every regular file the viewer dumped into out_dir.
        files = next(os.walk(out_dir))[2]
        for file in files:
            self.log(Level.INFO, " File Name is ==> " + file)
            dev_file = os.path.join(out_dir, file)
            # Path stored case-relative so the case remains portable.
            local_file = os.path.join("ModuleOutput\\thumbcache\\" + str(thumb_file.getId()) + "-" + thumb_file.getName(), file)
            self.log(Level.INFO, " Dev File Name is ==> " + dev_file)
            self.log(Level.INFO, " Local File Name is ==> " + local_file)
            if not(self.check_dervived_existance(dataSource, file, abstract_file_info)):
                # Add derived file.
                # Parameters Are:
                #   File Name, Local Path, size, ctime, crtime, atime, mtime,
                #   isFile, Parent File, rederive Details, Tool Name,
                #   Tool Version, Other Details, Encoding Type
                dervived_file = skCase.addDerivedFile(file, local_file, os.path.getsize(dev_file), + \
                                0, 0, 0, 0, True, abstract_file_info, "", "thumbcache_viewer_cmd.exe", "1.0.3.4", "", TskData.EncodingType.NONE)
                #self.log(Level.INFO, "Derived File ==> " + str(dervived_file))
            else:
                # Already registered on a previous run — skip.
                pass

        # Remove the extracted database copy; the dumped images are kept.
        try:
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "removal of thumbcache file " + lclDbPath + " failed " )

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Thumbcache",
                                          " Thumbcache Files Have Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract and process Atomic Wallet artifacts from the data source.

    Finds Connection.log* files and history.json under .atomic directories,
    copies each non-slack file locally, and hands it to
    processConnectionLogs / processHistory respectively.  Extracted copies
    and the working directory are removed afterwards.

    Returns IngestModule.ProcessResult.OK.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the connection log file information
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    connectionFiles = fileManager.findFiles(dataSource, "Connection.log%", ".atomic")
    numFiles = len(connectionFiles)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create Atomic Wallet directory in temp directory; if it exists then
    # continue on processing.
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Atomic_Wallet")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # get and process connection logs
    for file in connectionFiles:
        if "-slack" not in file.getName():
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            fileCount += 1

            # Save the file locally. Use file id as name to reduce collisions
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            self.processConnectionLogs(extractedFile, file)
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)
        else:
            # Slack copy is never extracted; best-effort removal of any leftover.
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)

    # Get and process history file
    historyFiles = fileManager.findFiles(dataSource, "history.json", ".atomic")
    numFiles = len(historyFiles)
    for file in historyFiles:
        if "-slack" not in file.getName():
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            fileCount += 1

            # Save the file locally. Use file id as name to reduce collisions
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            self.processHistory(extractedFile, file)
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)
        else:
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)

    try:
        shutil.rmtree(temporaryDirectory)
    except:
        self.log(Level.INFO, "removal of temporary directory failed " + temporaryDirectory)

    # After all databases, post a message to the ingest messages in box.
    # BUG FIX: the message previously said "Facebook Chat" — a copy-paste
    # error from another module; this module analyzes Atomic Wallet data.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Atomic Wallet", " Atomic Wallet Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def shutDown(self): # As a final part of this example, we'll send a message to the ingest inbox with the number of files found (in this thread) message = IngestMessage.createMessage( IngestMessage.MessageType.DATA, SampleJythonFileIngestModuleFactory.moduleName, str(self.filesFound) + " files found") ingestServices = IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Parse Windows .evtx event logs from the data source.

    Copies every *.evtx file to a temp directory, runs an external EXE that
    converts them into a SQLite database (EventLogs.db3), then reads that DB
    over JDBC and posts blackboard artifacts — either one artifact per event
    (when specific event ids were configured) or per-id counts (when 'ALL'
    was selected).  Returns ERROR if no event logs were selected, OK otherwise.
    """
    #Check to see if event logs were selected, if not then send message and error out else process events selected
    self.log(Level.INFO, "List Of Events ==> " + str(self.List_Of_Events) + " <== Number of Events ==> " + str(len(self.List_Of_Events)))
    if len(self.List_Of_Events) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " No Event Logs Selected to Parse " )
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR
    else:
        # Check to see if the artifacts exist and if not then create it, also check to see if the attributes
        # exist and if not then create them
        skCase = Case.getCurrentCase().getSleuthkitCase();
        # NOTE(review): this transaction is opened but never committed or
        # rolled back anywhere in this method — confirm whether it is needed.
        skCase_Tran = skCase.beginTransaction()
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_evtx = skCase.addArtifactType( "TSK_EVTX_LOGS", "Windows Event Logs")
        except:
            # addArtifactType throws when the type already exists; fall back to lookup.
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_evtx_Long = skCase.addArtifactType( "TSK_EVTX_LOGS_BY_ID", "Windows Event Logs By Event Id")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID")
        # Create each custom attribute type; failures (already-exists) are only logged
        # because every attribute is re-fetched by name below.
        try:
            attID_ev_fn = skCase.addArtifactAttributeType("TSK_EVTX_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Log File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ")
        try:
            attID_ev_rc = skCase.addArtifactAttributeType("TSK_EVTX_RECOVERED_RECORD", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Recovered Record")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Recovered Record. ==> ")
        try:
            attID_ev_cn = skCase.addArtifactAttributeType("TSK_EVTX_COMPUTER_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Computer Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ")
        try:
            attID_ev_ei = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Identiifier")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ")
        try:
            attID_ev_eiq = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Identifier Qualifiers")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Identifier Qualifiers. ==> ")
        try:
            attID_ev_el = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_LEVEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Level")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Level. ==> ")
        try:
            attID_ev_oif = skCase.addArtifactAttributeType("TSK_EVTX_OFFSET_IN_FILE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Offset In File")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Offset In File. ==> ")
        try:
            attID_ev_id = skCase.addArtifactAttributeType("TSK_EVTX_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Identifier")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ")
        try:
            attID_ev_sn = skCase.addArtifactAttributeType("TSK_EVTX_SOURCE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Source Name. ==> ")
        try:
            attID_ev_usi = skCase.addArtifactAttributeType("TSK_EVTX_USER_SECURITY_ID", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User Security ID")
        except:
            self.log(Level.INFO, "Attributes Creation Error, User Security ID. ==> ")
        try:
            attID_ev_et = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Time. ==> ")
        try:
            attID_ev_ete = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME_EPOCH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time Epoch")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ")
        try:
            attID_ev_dt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Detail")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Detail. ==> ")
        try:
            attID_ev_cnt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_ID_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Id Count")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event ID Count. ==> ")

        #self.log(Level.INFO, "Get Artifacts after they were created.")
        # Get the new artifacts and attributes that were just created
        artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
        artID_evtx_evt = skCase.getArtifactType("TSK_EVTX_LOGS")
        artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID")
        artID_evtx_Long_evt = skCase.getArtifactType("TSK_EVTX_LOGS_BY_ID")
        attID_ev_fn = skCase.getAttributeType("TSK_EVTX_FILE_NAME")
        attID_ev_rc = skCase.getAttributeType("TSK_EVTX_RECOVERED_RECORD")
        attID_ev_cn = skCase.getAttributeType("TSK_EVTX_COMPUTER_NAME")
        attID_ev_ei = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER")
        attID_ev_eiq = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS")
        attID_ev_el = skCase.getAttributeType("TSK_EVTX_EVENT_LEVEL")
        attID_ev_oif = skCase.getAttributeType("TSK_EVTX_OFFSET_IN_FILE")
        attID_ev_id = skCase.getAttributeType("TSK_EVTX_IDENTIFIER")
        attID_ev_sn = skCase.getAttributeType("TSK_EVTX_SOURCE_NAME")
        attID_ev_usi = skCase.getAttributeType("TSK_EVTX_USER_SECURITY_ID")
        attID_ev_et = skCase.getAttributeType("TSK_EVTX_EVENT_TIME")
        attID_ev_ete = skCase.getAttributeType("TSK_EVTX_EVENT_TIME_EPOCH")
        attID_ev_dt = skCase.getAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT")
        attID_ev_cnt = skCase.getAttributeType("TSK_EVTX_EVENT_ID_COUNT")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find the Windows Event Log Files
        files = []
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%.evtx")
        # if self.List_Of_Events[0] == 'ALL':
           # files = fileManager.findFiles(dataSource, "%.evtx")
        # else:
           # for eventlog in self.List_Of_Events:
               # file_name = fileManager.findFiles(dataSource, eventlog)
               # files.extend(file_name)
               #self.log(Level.INFO, "found " + str(file_name) + " files")
        #self.log(Level.INFO, "found " + str(files) + " files")

        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;

        # Create Event Log directory in temp directory, if it exists then continue on processing
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        temp_dir = os.path.join(Temp_Dir, "EventLogs")
        try:
            os.mkdir(temp_dir)
        except:
            self.log(Level.INFO, "Event Log Directory already exists " + temp_dir)

        # Write out each Event Log file to the temp directory
        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE once over the whole temp directory, saving output to a sqlite database
        self.log(Level.INFO, "Running program on data source " + self.path_to_exe + " parm 1 ==> " + temp_dir + " Parm 2 ==> " + os.path.join(Temp_Dir,"\EventLogs.db3"))
        subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "EventLogs.db3")]).communicate()[0]

        # Set the database to be read to the one created by the Event_EVTX program
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "EventLogs.db3")
        self.log(Level.INFO, "Path to the Eventlogs database file created ==> " + lclDbPath)

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # files = []
        # fileManager = Case.getCurrentCase().getServices().getFileManager()
        # if self.List_Of_Events[0] == 'ALL':
           # files = fileManager.findFiles(dataSource, "%.evtx")
        # else:
           # for eventlog in self.List_Of_Events:
               # file_name = fileManager.findFiles(dataSource, eventlog)
               # files.extend(file_name)

        for file in files:
            file_name = file.getName()
            self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====")
            # Query the contacts table in the database and get all columns.
            # NOTE(review): SQL below is built by string concatenation from the
            # file name and the configured id list; the DB is locally generated,
            # but a PreparedStatement would still be safer.
            if self.List_Of_Events[0] != 'ALL':
                # Specific event ids requested: one artifact per matching event row.
                try:
                    stmt = dbConn.createStatement()
                    SQL_Statement = "SELECT File_Name, Recovered_Record, Computer_name, Event_Identifier, " + \
                                    " Event_Identifier_Qualifiers, Event_Level, Event_offset, Identifier, " + \
                                    " Event_source_Name, Event_User_Security_Identifier, Event_Time, " + \
                                    " Event_Time_Epoch, Event_Detail_Text FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \
                                    " and Event_Identifier in ('" + self.Event_Id_List + "');"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====")
                    resultSet = stmt.executeQuery(SQL_Statement)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and create artifacts
                while resultSet.next():
                    try:
                        #File_Name = resultSet.getString("File_Name")
                        #Recovered_Record = resultSet.getString("Recovered_Record")
                        Computer_Name = resultSet.getString("Computer_Name")
                        Event_Identifier = resultSet.getInt("Event_Identifier")
                        #Event_Identifier_Qualifiers = resultSet.getString("Event_Identifier_Qualifiers")
                        Event_Level = resultSet.getString("Event_Level")
                        #Event_Offset = resultSet.getString("Event_Offset")
                        #Identifier = resultSet.getString("Identifier")
                        Event_Source_Name = resultSet.getString("Event_Source_Name")
                        Event_User_Security_Identifier = resultSet.getString("Event_User_Security_Identifier")
                        Event_Time = resultSet.getString("Event_Time")
                        #Event_Time_Epoch = resultSet.getString("Event_Time_Epoch")
                        Event_Detail_Text = resultSet.getString("Event_Detail_Text")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

                    # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields
                    # Make artifact for TSK_EVTX_LOGS
                    art = file.newArtifact(artID_evtx)
                    art.addAttributes(((BlackboardAttribute(attID_ev_cn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Computer_Name)), \
                                       (BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \
                                       (BlackboardAttribute(attID_ev_el, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Level)), \
                                       (BlackboardAttribute(attID_ev_sn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Source_Name)), \
                                       (BlackboardAttribute(attID_ev_usi, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_User_Security_Identifier)), \
                                       (BlackboardAttribute(attID_ev_et, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time)), \
                                       (BlackboardAttribute(attID_ev_dt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Detail_Text))))
                    # These attributes may also be added in the future
                    #art.addAttribute(BlackboardAttribute(attID_ev_fn, ParseEvtxByEventIDIngestModuleFactory.moduleName, File_Name))
                    #art.addAttribute(BlackboardAttribute(attID_ev_rc, ParseEvtxByEventIDIngestModuleFactory.moduleName, Recovered_Record))
                    #art.addAttribute(BlackboardAttribute(attID_ev_eiq, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier_Qualifiers))
                    #art.addAttribute(BlackboardAttribute(attID_ev_oif, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Offset))
                    #art.addAttribute(BlackboardAttribute(attID_ev_id, ParseEvtxByEventIDIngestModuleFactory.moduleName, Identifier))
                    #art.addAttribute(BlackboardAttribute(attID_ev_ete, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time_Epoch))
            else:
                # 'ALL' selected: summarize with one artifact per (event id, file) count.
                try:
                    stmt_1 = dbConn.createStatement()
                    SQL_Statement_1 = "select event_identifier, file_name, count(*) 'Number_Of_Events' " + \
                                      " FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \
                                      " group by event_identifier, file_name order by 3;"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====")
                    resultSet_1 = stmt_1.executeQuery(SQL_Statement_1)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "This is the to see what the FU is")
                # Cycle through each row and create artifacts
                while resultSet_1.next():
                    try:
                        self.log(Level.INFO, "This is the to see what the FU is 2")
                        #File_Name = resultSet.getString("File_Name")
                        #Recovered_Record = resultSet.getString("Recovered_Record")
                        Event_Identifier = resultSet_1.getInt("Event_Identifier")
                        Event_ID_Count = resultSet_1.getInt("Number_Of_Events")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

                    self.log(Level.INFO, "This is the to see what the FU is 3")
                    # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields
                    # Make artifact for TSK_EVTX_LOGS
                    art_1 = file.newArtifact(artID_evtx_Long)
                    self.log(Level.INFO, "Type of Object is ==> " + str(type(Event_ID_Count)))
                    art_1.addAttributes(((BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \
                                         (BlackboardAttribute(attID_ev_cnt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_ID_Count))))

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))
        IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_Long_evt, None))

        # Clean up (stmt/stmt_1 hold whatever the last loop iteration created)
        try:
            if self.List_Of_Events[0] != 'ALL':
                stmt.close()
            else:
                stmt_1.close()
            dbConn.close()
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "Error closing the statment, closing the database or removing the file")

        #Clean up EventLog directory and files
        for file in files:
            try:
                os.remove(os.path.join(temp_dir,file.getName()))
            except:
                self.log(Level.INFO, "removal of Event Log file failed " + Temp_Dir + "\\" + file.getName())
        try:
            os.rmdir(temp_dir)
        except:
            self.log(Level.INFO, "removal of Event Logs directory failed " + Temp_Dir)

        # Fire an event to notify the UI and others that there are new artifacts
        # NOTE(review): this event was already fired above — appears redundant.
        IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " Event Logs have been parsed " )
        IngestServices.getInstance().postMessage(message)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))

        return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Convert hiberfil.sys to a raw memory image and attach it to the case.

    When self.hiber_flag is set: extracts each hiberfil.sys found at the root
    of the data source, runs Volatility's "imagecopy" plugin on it (via the
    python interpreter or a standalone executable, per self.Python_Program),
    and adds the resulting image as a new local-files data source.
    Always returns IngestModule.ProcessResult.OK.
    """
    self.log(Level.INFO, "Starting to process Hiberfil.sys and Crash Dumps")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    if self.hiber_flag:
        # Create <ModulesOutput>/Volatility/Memory-Image-hiberfil for the converted image
        Mod_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        try:
            ModOut_Dir = os.path.join(Mod_Dir, "Volatility", "Memory-Image-hiberfil")
            self.log(Level.INFO, "Module Output Directory ===> " + ModOut_Dir)
            #dir_util.mkpath(ModOut_Dir)
            os.mkdir(os.path.join(Mod_Dir, "Volatility"))
            os.mkdir(ModOut_Dir)
        except:
            self.log(Level.INFO, "***** Error Module Output Directory already exists " + ModOut_Dir)

        # Find hiberfil.sys at the root of the data source
        skCase = Case.getCurrentCase().getSleuthkitCase()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "hiberfil.sys", "/")
        numFiles = len(files)
        self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

        for file in files:
            self.log(Level.INFO, "File to process is ==> " + str(file))
            self.log(Level.INFO, "File name to process is ==> " + file.getName())
            # Extract hiberfil.sys to the case temp directory
            tmp_Dir = Case.getCurrentCase().getTempDirectory()
            Hiber_File = os.path.join(tmp_Dir, file.getName())
            ContentUtils.writeToFile(file, File(Hiber_File))
            self.log(Level.INFO, "File name to process is ==> " + Hiber_File)

            # Convert the hibernation file to a raw image with Volatility imagecopy.
            # NOTE(review): the output option is passed as a single fused argv
            # element "-O<path>" — optparse-style parsers accept this; confirm
            # against the Volatility build in use.
            dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
            if self.Python_Program:
                # Volatility is a python script; invoke it through the interpreter
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " + Hiber_File + " " + \
                         " -O " + dump_file)
                if PlatformUtil.isWindowsOS():
                    pipe = Popen(["Python.exe", self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
                else:
                    pipe = Popen(["python", self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            else:
                # Standalone Volatility executable
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " + Hiber_File + " " + \
                         " -O " + dump_file)
                pipe = Popen([self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            # Add the converted memory image as a new local-files data source
            services = IngestServices.getInstance()
            progress_updater = ProgressUpdater()
            newDataSources = []
            dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
            dir_list = []
            dir_list.append(dump_file)
            # skCase = Case.getCurrentCase().getSleuthkitCase();
            fileManager_2 = Case.getCurrentCase().getServices().getFileManager()
            skcase_data = Case.getCurrentCase()
            # Get a unique device id using uuid
            device_id = UUID.randomUUID()
            self.log(Level.INFO, "device id: ==> " + str(device_id))
            skcase_data.notifyAddingDataSource(device_id)
            # Add data source with files
            newDataSource = fileManager_2.addLocalFilesDataSource(str(device_id), "Hiberfile Memory Image", "", dir_list, progress_updater)
            newDataSources.append(newDataSource.getRootDirectory())
            # Notify the UI about each file that was added
            files_added = progress_updater.getFiles()
            #self.log(Level.INFO, "Fire Module1: ==> " + str(files_added))
            for file_added in files_added:
                skcase_data.notifyDataSourceAdded(file_added, device_id)
                self.log(Level.INFO, "Fire Module1: ==> " + str(file_added))

    # Post a completion message to the ingest inbox.
    # Bug fix: message previously read "extracted fro Image" — typo for "from".
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "HiberFil_Crash", " Hiberfil/Crash Dumps have been extracted from Image. ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK