def analyze(self, dataSource, fileManager, context):
    """Locate da_destination_history databases and extract geolocation data.

    Requires an open case; logs a warning and bails out if none is open.
    """
    try:
        self.current_case = Case.getCurrentCaseThrows()
    except NoCurrentCaseException as ex:
        self._logger.log(Level.WARNING, "No case currently open.", ex)
        self._logger.log(Level.WARNING, traceback.format_exc())
        return
    try:
        absFiles = fileManager.findFiles(dataSource, "da_destination_history")
        if absFiles.isEmpty():
            return
        for abstractFile in absFiles:
            try:
                # Copy the database out of the image so it can be opened locally;
                # the object id prefix keeps same-named files from colliding.
                localName = str(abstractFile.getId()) + abstractFile.getName()
                jFile = File(self.current_case.getTempDirectory(), localName)
                ContentUtils.writeToFile(abstractFile, jFile,
                                         context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(jFile.toString(), abstractFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE,
                                 "Error parsing Google map locations", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Google map locations.
        pass
def analyze(self, dataSource, fileManager, context):
    """Register the WWF account type and parse WordsFramework databases."""
    try:
        global wwfAccountType
        # The account type is shared with the message-parsing helper via a global.
        wwfAccountType = Case.getCurrentCase().getSleuthkitCase() \
            .getCommunicationsManager() \
            .addAccountType("WWF", "Words with Friends")
        for abstractFile in fileManager.findFiles(dataSource, "WordsFramework"):
            try:
                # Stage a local copy; id prefix avoids name collisions.
                localName = str(abstractFile.getId()) + abstractFile.getName()
                jFile = File(Case.getCurrentCase().getTempDirectory(), localName)
                ContentUtils.writeToFile(abstractFile, jFile,
                                         context.dataSourceIngestIsCancelled)
                self.__findWWFMessagesInDB(jFile.toString(), abstractFile,
                                           dataSource)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding WWF messages.
        pass
def copyToTempFile(file):
    """Copy an abstract file into the case temp directory and return its path.

    The file's object id is inserted before the extension so identically
    named files from different locations do not overwrite each other.
    """
    base, ext = os.path.splitext(file.getName())
    localName = base + "-" + str(file.getId()) + ext
    localPath = os.path.join(Case.getCurrentCase().getTempDirectory(), localName)
    ContentUtils.writeToFile(file, File(localPath))
    return localPath
def extractFile(self, fileName, temporaryDirectory, fileManager, dataSource):
    """Write every match of fileName from the data source into temporaryDirectory.

    Tar PaxHeader metadata entries are skipped. Returns the list of
    AbstractFiles successfully written (possibly partial if the job is
    cancelled mid-run).
    """
    files = fileManager.findFiles(dataSource, fileName)
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    fileList = []
    for file in files:
        if self.context.isJobCancelled():
            # BUG FIX: the original returned IngestModule.ProcessResult.OK
            # here, but callers of extractFile expect a list of files;
            # return what has been extracted so far instead.
            return fileList
        try:
            # Skip tar PaxHeader metadata entries.
            if not ('PaxHeader' in file.getParentPath()):
                # Prefix with the object id to avoid name collisions.
                extractedFile = os.path.join(
                    temporaryDirectory,
                    str(file.getId()) + "-" + file.getName())
                ContentUtils.writeToFile(file, File(extractedFile))
                fileList.append(file)
        except Exception:
            # Best-effort: log the failure and continue with the next file.
            self.log(
                Level.INFO,
                "Error writing File " +
                os.path.join(temporaryDirectory, file.getName()))
    return fileList
def process(self, dataSource, progressBar):
    """Export every file matching the configured extension list into
    <case export dir>/Mass_Export/<ext>/.

    Returns IngestModule.ProcessResult.OK (also on user cancellation).
    """
    self.log(Level.INFO, "Starting to process")
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()
    # Create the Mass_Export directory; if it already exists, continue.
    exportDirectory = Case.getCurrentCase().getExportDirectory()
    exportDir = os.path.join(exportDirectory, "Mass_Export")
    self.log(Level.INFO, "create Directory " + exportDir)
    try:
        os.mkdir(exportDir)
    except:
        self.log(Level.INFO, "Mass Export directory already exists" + exportDir)
    for fileExtension in self.extensionList:
        fileExt = fileExtension.strip()
        files = fileManager.findFiles(dataSource, "%." + fileExt)
        numFiles = len(files)
        self.log(
            Level.INFO,
            "found " + str(numFiles) + " files for extension ==> " +
            str(fileExtension))
        # One sub-directory per extension.
        expDir = os.path.join(exportDir, fileExt)
        try:
            os.mkdir(expDir)
        except:
            self.log(Level.INFO, "Directory already exists ==> " + str(expDir))
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            try:
                # Prefix with the object id to avoid name collisions.
                extractedFile = os.path.join(
                    expDir, str(file.getId()) + "-" + file.getName())
                ContentUtils.writeToFile(file, File(extractedFile))
            except:
                # BUG FIX: the original referenced the undefined name
                # 'temporaryDirectory' here, which raised NameError instead
                # of logging the failed path.
                self.log(
                    Level.INFO,
                    "Error writing File " +
                    os.path.join(expDir, file.getName()))
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Mass Export By Extension Complete", "Complete")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run the YARA executable over every non-KNOWN .doc file in the data
    source, collecting scanner output into the case Reports/YARA.txt file,
    which is then registered as a case report.
    """
    progressBar.switchToIndeterminate()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports", "YARA.txt")
    reportHandle = open(reportPath, 'w')
    # BUG FIX: the original leaked reportHandle when the user cancelled the
    # job (early return without close); try/finally guarantees the close.
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Skip hash-known (NSRL) files.
            if (str(file.getKnown()) != "KNOWN"):
                # Stage the file locally so the external scanner can read it.
                exportPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getId()) + "." + file.getNameExtension())
                ContentUtils.writeToFile(file, File(exportPath))
                # Scanner stdout goes straight into the report file.
                subprocess.Popen(
                    [self.path_to_exe, self.path_to_rules, exportPath],
                    stdout=reportHandle).communicate()[0]
                reportHandle.write(file.getParentPath() + file.getName() + '\n\n')
                self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "YARA Scan",
                                              "Scanned %d Files" % numFiles)
        IngestServices.getInstance().postMessage(message)
    finally:
        reportHandle.close()
    Case.getCurrentCase().addReport(reportPath, "YARA Scan",
                                    "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Locate RingCentral meeting chat logs and parse each one."""
    # Amount of work is unknown until the file search completes.
    progressBar.switchToIndeterminate()
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    chatFiles = fileManager.findFiles(dataSource, "%.txt",
                                      "/Documents/RingCentral/Meetings")
    numFiles = len(chatFiles)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    # Staging area under the case temp directory; ignore if already present.
    temporaryDirectory = os.path.join(
        Case.getCurrentCase().getTempDirectory(), "RingCentral")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass
    for file in chatFiles:
        # Slack variants of the same meetings are not processed.
        if "-slack" in file.getName():
            continue
        # Honor user cancellation between files.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Stage locally; the object id prefix avoids name collisions.
        extractedFile = os.path.join(
            temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        self.chatMeetingLogs(extractedFile, file)
        try:
            os.remove(extractedFile)
        except:
            self.log(Level.INFO, "Failed to remove file " + extractedFile)
    # Tell the user we are done.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "CentralRing",
        " CentralRing Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def writeBackupFiles(self, fileInfo, modDir, parentPath, fileManager, dataSource):
    """Write each backup file described in fileInfo to its destination.

    Each record is indexed as record[1] = file name to search for and
    record[2] = destination path; the destination file name is sanitized
    with validFileName before writing.
    """
    for record in fileInfo:
        matches = fileManager.findFiles(dataSource, record[1], parentPath)
        for match in matches:
            destDir, destName = os.path.split(record[2])
            target = os.path.join(destDir, self.validFileName(destName))
            ContentUtils.writeToFile(match, File(target))
def _extract(self, content, path):
    """Recursively mirror a content tree beneath *path* on local disk."""
    for child in content.getChildren():
        name = child.getName()
        # Skip the self/parent directory entries.
        if name in (".", ".."):
            continue
        target = os.path.join(path, name)
        if child.isFile():
            ContentUtils.writeToFile(child, File(target))
        elif child.isDir():
            os.mkdir(target)
            self._extract(child, target)
def analyze(self, dataSource, fileManager, context):
    """Extract Tango tc.db databases and parse the messages in each."""
    try:
        for dbFile in fileManager.findFiles(dataSource, "tc.db"):
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(dbFile.getId()) + dbFile.getName())
                ContentUtils.writeToFile(dbFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findTangoMessagesInDB(localCopy.toString(), dbFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE,
                                 "Error parsing Tango messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        self._logger.log(Level.SEVERE, "Error finding Tango messages", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
def analyze(self, dataSource, fileManager, context):
    """Extract Tango tc.db databases and parse messages, tagging them with
    the originating data source."""
    try:
        for dbFile in fileManager.findFiles(dataSource, "tc.db"):
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(dbFile.getId()) + dbFile.getName())
                ContentUtils.writeToFile(dbFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findTangoMessagesInDB(localCopy.toString(), dbFile,
                                             dataSource)
            except Exception as ex:
                self._logger.log(Level.SEVERE,
                                 "Error parsing Tango messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Tango messages.
        pass
def process(self, dataSource, progressBar):
    """Run the YARA executable over every non-KNOWN .doc file in the data
    source, collecting output into the case Reports/YARA.txt file, which is
    then registered as a case report.
    """
    progressBar.switchToIndeterminate()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports", "YARA.txt")
    reportHandle = open(reportPath, 'w')
    # BUG FIX: the original leaked reportHandle when the user cancelled the
    # job (early return without close); try/finally guarantees the close.
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Skip hash-known (NSRL) files.
            if (str(file.getKnown()) != "KNOWN"):
                # Stage the file locally so the external scanner can read it.
                exportPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getId()) + "." + file.getNameExtension())
                ContentUtils.writeToFile(file, File(exportPath))
                # Scanner stdout goes straight into the report file.
                subprocess.Popen(
                    [self.path_to_exe, self.path_to_rules, exportPath],
                    stdout=reportHandle).communicate()[0]
                reportHandle.write(file.getParentPath() + file.getName() + '\n\n')
                self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "YARA Scan",
                                              "Scanned %d Files" % numFiles)
        IngestServices.getInstance().postMessage(message)
    finally:
        reportHandle.close()
    Case.getCurrentCase().addReport(reportPath, "YARA Scan",
                                    "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def analyze(self, dataSource, fileManager, context):
    """Copy Android call-log databases locally and parse the call records."""
    try:
        absFiles = fileManager.findFiles(dataSource, "logs.db")
        absFiles.addAll(fileManager.findFiles(dataSource, "contacts.db"))
        absFiles.addAll(fileManager.findFiles(dataSource, "contacts2.db"))
        for abstractFile in absFiles:
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findCallLogsInDB(localCopy.toString(), abstractFile,
                                        dataSource)
            except IOException as ex:
                self._logger.log(Level.SEVERE,
                                 "Error writing temporary call log db to disk",
                                 ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        self._logger.log(Level.SEVERE, "Error finding call logs", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
def analyze(self, dataSource, fileManager, context):
    """Parse Google Maps destination-history databases, if any are present."""
    try:
        absFiles = fileManager.findFiles(dataSource, "da_destination_history")
        if absFiles.isEmpty():
            return
        for abstractFile in absFiles:
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(localCopy.toString(), abstractFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE,
                                 "Error parsing Google map locations", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Google map locations.
        pass
def analyze(self, dataSource, fileManager, context):
    """Copy Android call-log databases locally and parse the call records;
    search failures are silently ignored."""
    try:
        absFiles = fileManager.findFiles(dataSource, "logs.db")
        absFiles.addAll(fileManager.findFiles(dataSource, "contacts.db"))
        absFiles.addAll(fileManager.findFiles(dataSource, "contacts2.db"))
        for abstractFile in absFiles:
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findCallLogsInDB(localCopy.toString(), abstractFile,
                                        dataSource)
            except IOException as ex:
                self._logger.log(Level.SEVERE,
                                 "Error writing temporary call log db to disk",
                                 ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding call logs.
        pass
def analyze(self, dataSource, fileManager, context):
    """Parse browser CachedGeoposition databases for location artifacts."""
    try:
        for abstractFile in fileManager.findFiles(dataSource,
                                                  "CachedGeoposition%.db"):
            # Nothing to parse in an empty database.
            if abstractFile.getSize() == 0:
                continue
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(localCopy.toString(), abstractFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE,
                                 "Error parsing browser location files", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding browser location files.
        pass
def store_file(self, directory):
    """Store the file in the given case directory (usually 'temp').

    Args:
        directory: where to store the file.

    Returns:
        True if the file was stored, False on any error.
    """
    try:
        # Record the local path on the instance before writing so callers
        # can see where the copy was attempted.
        self.lclPath = os.path.join(directory, self.storedName)
        ContentUtils.writeToFile(self.file, File(self.lclPath))
        return True
    except:
        return False
def analyze(self, dataSource, fileManager, context):
    """Parse Android cache.cell and cache.wifi files for cached locations."""
    try:
        abstractFiles = fileManager.findFiles(dataSource, "cache.cell")
        abstractFiles.addAll(fileManager.findFiles(dataSource, "cache.wifi"))
        for abstractFile in abstractFiles:
            # Skip empty caches.
            if abstractFile.getSize() == 0:
                continue
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                # This parser takes the local file object itself, not a path.
                self.__findGeoLocationsInFile(localCopy, abstractFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE,
                                 "Error parsing cached location files", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        self._logger.log(Level.SEVERE,
                         "Error finding cached location files", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
def analyze(self, dataSource, fileManager, context):
    """Extract Android contacts databases and parse the contact entries."""
    try:
        absFiles = fileManager.findFiles(dataSource, "contacts.db")
        absFiles.addAll(fileManager.findFiles(dataSource, "contacts2.db"))
        if absFiles.isEmpty():
            return
        for abstractFile in absFiles:
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findContactsInDB(str(localCopy.toString()), abstractFile,
                                        dataSource)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        self._logger.log(Level.SEVERE, "Error finding Contacts", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
def analyze(self, dataSource, fileManager, context):
    """Register the Words with Friends account type, then extract and parse
    every WordsFramework database found in the data source."""
    try:
        global wwfAccountType
        # Shared with the message-parsing helper through a module global.
        commManager = Case.getCurrentCase().getSleuthkitCase() \
            .getCommunicationsManager()
        wwfAccountType = commManager.addAccountType("WWF",
                                                    "Words with Friends")
        for dbFile in fileManager.findFiles(dataSource, "WordsFramework"):
            try:
                # Stage a local copy; the id prefix prevents collisions.
                localCopy = File(Case.getCurrentCase().getTempDirectory(),
                                 str(dbFile.getId()) + dbFile.getName())
                ContentUtils.writeToFile(dbFile, localCopy,
                                         context.dataSourceIngestIsCancelled)
                self.__findWWFMessagesInDB(localCopy.toString(), dbFile,
                                           dataSource)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding WWF messages.
        pass
def process(self, file):
    """File-level ingest: flag common document types as interesting files
    and copy each one into the module output TextFiles directory.

    Returns IngestModule.ProcessResult.OK in all cases.
    """
    # Skip non-files (unallocated/unused blocks, directories).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
            (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
            (file.isFile() == False)):
        return IngestModule.ProcessResult.OK
    # Look for files with extension docx, doc, pdf, txt, etc.
    extensions = ['docx', 'doc', 'pdf', 'txt', 'csv',
                  'ppt', 'pptx', 'rtf', 'html']
    if (file.getNameExtension() in extensions):
        # TSK_INTERESTING_FILE_HIT is a generic artifact type; see the
        # developer docs for other examples.
        art = file.newArtifact(
            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
            FindTextFilesModuleFactory.moduleName, "Find text Files")
        # BUG FIX: the original concatenated "\TextFiles" (a literal
        # backslash), which only works on Windows; os.path.join is portable.
        # (Also fixes the 'outpuDir' typo in the local name.)
        outputDir = os.path.join(
            Case.getCurrentCase().getModulesOutputDirAbsPath(), "TextFiles")
        self.log(Level.INFO, "create Directory " + outputDir)
        try:
            os.mkdir(outputDir)
        except:
            self.log(Level.INFO, "Find Text Directory already exists " + outputDir)
        configFilesPath = os.path.join(outputDir, str(file.getName()))
        ContentUtils.writeToFile(file, File(configFilesPath))
        art.addAttribute(att)
        # Notify listeners that new interesting-file artifacts exist.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(FindTextFilesModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                            None))
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Transcribe opus audio files with an external speech-to-text plugin
    and post each transcription as a per-extension blackboard artifact.
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    current_dir = os.path.join(os.path.dirname(__file__))
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()
    # Blackboard is used to index artifacts for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFilesByMimeType(dataSource, ["audio/opus"])
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "Processing file: " + file.getName())
        file_type = file.getName().split(".")
        self.log(Level.INFO, "Type file: " + file_type[-1])
        self.log(Level.INFO, '#' * 20)
        self.log(Level.INFO, os.getcwd())
        # Stage the audio file locally for the transcription plugin.
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                 file.getName() + "-" + str(file.getId()))
        ContentUtils.writeToFile(file, File(lclDbPath))
        credentials = os.path.join(current_dir, 'credentials_google.json')
        plugin = os.path.join(current_dir, 'cli_speech', 'linux',
                              'PluginSpeech', 'PluginSpeech')
        # BUG FIX: platform.system is a function; the original compared the
        # function object itself to 'Windows' (always False), so the
        # Windows plugin binary was never selected.
        if platform.system() == 'Windows':
            plugin = os.path.join(current_dir, 'cli_speech', 'windows',
                                  'PluginSpeech', 'PluginSpeech.exe')
        response_speech = subprocess.Popen(
            [plugin, "--input={}".format(lclDbPath),
             "--credentials={}".format(credentials)],
            stdout=subprocess.PIPE)
        result = response_speech.communicate()
        self.log(Level.INFO, result[0].decode('utf-8'))
        fileCount += 1
        # Create the per-extension artifact type on first use; subsequent
        # calls fail and fall through to the lookup below.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts ==> TSK_AUDIO_TEXT")
            artID_sql = skCase.addArtifactType(
                'TSK_AUDIO_{}'.format(file_type[-1]),
                'Audios transcritos - {}'.format(file_type[-1]))
        except:
            self.log(Level.INFO, "Artifacts Creation Error, artifact TSK_AUDIO_TEXT exists. ==> ")
        try:
            attID_ex1 = skCase.addArtifactAttributeType(
                'TSK_AUDIO_{}'.format(file_type[-1]),
                BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                'Texto transcrito de audio')
        except:
            self.log(Level.INFO, "Artifacts Attribute Type creation Error, artifact attribute type exists. ==> ")
        attID_ex1 = skCase.getAttributeType('TSK_AUDIO_{}'.format(file_type[-1]))
        artID_sql = skCase.getArtifactTypeID('TSK_AUDIO_{}'.format(file_type[-1]))
        # Attach the transcription text to the file as a new artifact.
        artifact = file.newArtifact(artID_sql)
        artifact.addAttribute(BlackboardAttribute(
            attID_ex1, AudioSpeechTextModuleFactory.moduleName,
            result[0].decode('utf-8')))
        try:
            blackboard.indexArtifact(artifact)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE,
                     "Error indexing artifact " + artifact.getDisplayName())
        progressBar.progress(fileCount)
        # Remove the staged copy.
        os.remove(lclDbPath)
    # FIX: process() must report a result; the original fell off the end
    # (returning None) with the return statement commented out.
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Submit every file carrying one of the configured tags to a Cuckoo
    sandbox instance via the helper executable, posting a status message
    per file and a summary at the end.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()
    # Create the Cuckoo staging directory in the case temp directory.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    Files_submitted = 0
    # BUG FIX: the original created Temp_Dir + "\Cuckoo" but wrote files
    # into Temp_Dir + "\cuckoo" — that only worked on case-insensitive
    # (Windows) file systems, and the literal backslash is non-portable.
    cuckooDir = os.path.join(Temp_Dir, "Cuckoo")
    try:
        os.mkdir(cuckooDir)
    except:
        self.log(Level.INFO, "Cuckoo Directory already exists " + Temp_Dir)
    for tag_name in self.tag_list:
        self.log(Level.INFO, "Processing Tag ==> " + tag_name)
        # NOTE(review): tag_name is interpolated directly into the SQL.
        # Tags come from module configuration, but a parameterized query
        # would be safer if tag names can contain quotes.
        sql_statement = ("select name, parent_path from tsk_files a, tag_names c, content_tags d " +
                         " where d.tag_name_id = c.tag_name_id and c.display_name = '" +
                         tag_name + "' and d.obj_id = a.obj_id;")
        self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
        skCase = Case.getCurrentCase().getSleuthkitCase()
        dbquery = skCase.executeQuery(sql_statement)
        resultSet = dbquery.getResultSet()
        while resultSet.next():
            fileManager = Case.getCurrentCase().getServices().getFileManager()
            files = fileManager.findFiles(dataSource,
                                          resultSet.getString("name"),
                                          resultSet.getString("parent_path"))
            for file in files:
                # Check if the user pressed cancel while we were busy.
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK
                # Save the file locally, then hand it to the submit helper.
                FilePath = os.path.join(cuckooDir, file.getName())
                ContentUtils.writeToFile(file, File(FilePath))
                pipe = Popen([self.path_to_cuckoo_exe, self.Protocol,
                              self.IP_Address, self.Port_Number,
                              "submit_file", FilePath],
                             stdout=PIPE, stderr=PIPE)
                out_text = pipe.communicate()[0]
                self.log(Level.INFO,
                         resultSet.getString("parent_path") + "\\" +
                         resultSet.getString("name") +
                         "<== Status of File Submit is " + out_text + " ==>")
                Files_submitted = Files_submitted + 1
                # Per-file status message in the ingest inbox.
                message = IngestMessage.createMessage(
                    IngestMessage.MessageType.DATA, "Cuckoo File Submit",
                    resultSet.getString("parent_path") + "/" +
                    resultSet.getString("name") + " " + out_text)
                IngestServices.getInstance().postMessage(message)
                # Delete the staged copy.
                try:
                    os.remove(FilePath)
                except:
                    self.log(Level.INFO, "removal of " + FilePath + " Failed ")
        dbquery.close()
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Cuckoo File Submit",
        str(Files_submitted) + " files have been submitted to cuckoo")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse msapplication.xml (IE pinned-site tile) files and post a
    TSK_IETILES artifact per site entry with its create/access dates."""
    PostBoard = IngestServices.getInstance()
    progressBar.switchToIndeterminate()
    # Current case
    ccase = Case.getCurrentCase().getSleuthkitCase()
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "msapplication.xml")
    numFiles = len(files)
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "IE Tiles Analyzer",
        "About to analyze " + str(numFiles) + " files")
    PostBoard.postMessage(message)
    progressBar.switchToDeterminate(numFiles)
    try:
        # Try adding the Artifact Type; fails (broad except below) when the
        # types were already created by a previous run.
        artifact_name = "TSK_IETILES"
        artifact_desc = "IE Tiles Analyzer"
        artID_tiles = ccase.addArtifactType(artifact_name, artifact_desc)
        artID_tiles_evt = ccase.getArtifactType(artifact_name)
        attribute_name = "TSK_TILES_SITE"
        attribute_name2 = "TSK_TILES_DATE"
        attribute_name3 = "TSK_TILES_ACCESSDATE"
        attID_ex1 = ccase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Site")
        attID_ex2 = ccase.addArtifactAttributeType(attribute_name2, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Date")
        attID_ex3 = ccase.addArtifactAttributeType(attribute_name3, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Access Date")
    except:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", "Already created?")
        PostBoard.postMessage(message)
    fileCount = 0
    for file in files:
        fileCount += 1
        progressBar.progress(fileCount)
        progressBar.progress("IE Tiles Analyzer")
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "++++++Processing file: " + file.getName())
        self.log(Level.INFO, "File count:" + str(fileCount))
        # Stage the XML locally (named by object id) so ElementTree can read it.
        lclXMLPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".xml")
        ContentUtils.writeToFile(file, File(lclXMLPath))
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", lclXMLPath)
        #PostBoard.postMessage(message)
        try:
            tree = ET.ElementTree(file=lclXMLPath)
            root = tree.getroot()
            for config in root.iter('site'):
                site = config.attrib.get('src')
                message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", site)
                PostBoard.postMessage(message)
                # Dates are stored as "<low>,<high>" pairs; CalculateTime
                # combines the halves into a usable timestamp.
                for dates in root.iter('accdate'):
                    accessD = dates.text.split(",")
                    AloValue = accessD[0]
                    AhiValue = accessD[1]
                    accessdate = CalculateTime(AloValue, AhiValue)
                for dates in root.iter('date'):
                    createD = dates.text.split(",")
                    CloValue = createD[0]
                    ChiValue = createD[1]
                    normaldate = CalculateTime(CloValue, ChiValue)
                if len(site) > 0:
                    # Look the types back up (creation above may have failed
                    # harmlessly) and attach one artifact per site entry.
                    artifact_name = "TSK_IETILES"
                    artifact_desc = "IE Tiles Analyzer"
                    artID_tiles_evt = ccase.getArtifactType(artifact_name)
                    artID_tiles = ccase.getArtifactTypeID(artifact_name)
                    art = file.newArtifact(artID_tiles)
                    attID_ex1 = ccase.getAttributeType("TSK_TILES_SITE")
                    art.addAttribute(BlackboardAttribute(attID_ex1, IETilesIngestModuleFactory.moduleName, site))
                    attID_ex1 = ccase.getAttributeType("TSK_TILES_DATE")
                    art.addAttribute(BlackboardAttribute(attID_ex1, IETilesIngestModuleFactory.moduleName, normaldate))
                    attID_ex1 = ccase.getAttributeType("TSK_TILES_ACCESSDATE")
                    art.addAttribute(BlackboardAttribute(attID_ex1, IETilesIngestModuleFactory.moduleName, accessdate))
                    PostBoard.fireModuleDataEvent(ModuleDataEvent(IETilesIngestModuleFactory.moduleName, \
                        artID_tiles_evt, None))
                else:
                    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", "No sites found: " + lclXMLPath)
                    PostBoard.postMessage(message)
        except:
            # Broad catch: any XML/parse/artifact failure for this file is
            # reported and processing moves on to the next file.
            message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", "SOMETHING WENT WRONG")
            PostBoard.postMessage(message)
        # Clean up
        os.remove(lclXMLPath)
    # After all XML files, post a message to the ingest messages in box.
    if numFiles == 0:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", "Nothing to analyze ")
        PostBoard.postMessage(message)
    else:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "IE Tiles Analyzer", "Analyzed %d files" % fileCount)
        PostBoard.postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract Facebook databases and run the external chat parser.

    First pass copies every fbsyncstore.db locally; second pass copies each
    orca2.db, runs the helper executable over it, and processes the
    resulting Autopsy_Chat.db3 database.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()
    # Get the current case and the fbsyncstore.db abstract file information.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "fbsyncstore.db")
    numFiles = len(files)
    #self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    # Create FB_Chat directory in temp directory; if it exists, continue.
    temporaryDirectory = os.path.join(
        Case.getCurrentCase().getTempDirectory(), "FB_Chat")
    #self.log(Level.INFO, "create Directory " + moduleDirectory)
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass
        #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)
    # Write out each fbsyncstore.db file.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1
        # Save the file locally. Use file id as name to reduce collisions
        extractedFile = os.path.join(
            temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        #os.remove(extractedFile)
    # Get and process chats from each orca2.db.
    files = fileManager.findFiles(dataSource, "orca2.db")
    numFiles = len(files)
    #self.log(Level.INFO, "found " + str(numFiles) + " files")
    # Output database produced by the external parser.
    databaseFile = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                "Autopsy_Chat.db3")
    for file in files:
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1
        # Save the file locally. Use file id as name to reduce collisions
        extractedFile = os.path.join(
            temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        self.log(
            Level.INFO,
            str(self.pathToExe) + " " + str(extractedFile) + " " +
            str(temporaryDirectory) + " " + str(databaseFile))
        # Run the external parser: orca2.db in, Autopsy_Chat.db3 out.
        pipe = Popen([
            self.pathToExe, extractedFile, temporaryDirectory, databaseFile
        ],
                     stdout=PIPE,
                     stderr=PIPE)
        outputFromRun = pipe.communicate()[0]
        self.processFbChat(databaseFile)
        self.processChats(skCase, file)
        #os.remove(extractedFile)
    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Facebook Chat",
        " Facebook Chat Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Carve images out of candidate files using the external foremost EXE.

    Selects files either by MIME type or all files (per the module options in
    self.List_Of_tables), writes each candidate to a temp file, runs foremost
    against it, registers any carved output as derived files, and posts an
    interesting-file artifact on every file that produced carvings.
    """
    moduleName = CarverFilesIngestModuleFactory.moduleName
    # Nothing to do if the user selected no MIME types in the module settings.
    if len(self.List_Of_tables) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "FileCarver", " No Mime Types Selected to Parse " )
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Build the candidate list.  Files <= 500 bytes are always skipped; slack
    # files ("-slack" suffix) are kept only when the option is enabled.
    if "All_Mime_Types" in self.List_Of_tables:
        files = fileManager.findFiles(dataSource, "%")
        if "Include_Slack_Space" in self.List_Of_tables:
            files=[i for i in files if (i.getSize() > 500)]
            numFiles = len(files)
        else:
            files=[i for i in files if (i.getSize() > 500) and not i.getName().endswith("-slack") ]
            numFiles = len(files)
    else:
        files = fileManager.findFilesByMimeType(self.mimeTypesToFind)
        if "Include_Slack_Space" in self.List_Of_tables:
            files=[i for i in files if (i.getSize() > 500)]
            numFiles = len(files)
        else:
            files=[i for i in files if (i.getSize() > 500) and not i.getName().endswith("-slack") ]
            numFiles = len(files)

    # if "Default_Mime_Types" in self.List_Of_tables:
    #     files = fileManager.findFilesByMimeType(self.mimeTypesToFind)
    #     if "Include_Slack_Space" in self.List_Of_tables:
    #         files=[i for i in files if (i.getSize() > 1000)]
    #         numFiles = len(files)
    #     else:
    #         files=[i for i in files if (i.getSize() > 1000) and not i.getName().endswith("-slack") ]
    #         numFiles = len(files)
    # else:
    #     files = fileManager.findFiles(dataSource, "%")
    #     if "Include_Slack_Space" in self.List_Of_tables:
    #         files=[i for i in files if (i.getSize() > 1000)]
    #         numFiles = len(files)
    #     else:
    #         files=[i for i in files if (i.getSize() > 1000) and not i.getName().endswith("-slack") ]
    #         numFiles = len(files)

    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    FileExtractCount=0

    # Carved output goes under ModuleOutput; extracted candidates go to temp.
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    tmp_dir = Case.getCurrentCase().getTempDirectory()
    if PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "\Carved-Foremost")
        except:
            self.log(Level.INFO, "Carved-Foremost Directory already exists " + Temp_Dir)
    else:
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "/Carved-Foremost")
        except:
            self.log(Level.INFO, "Carved-Foremost Directory already exists " + Temp_Dir)

    for file in files:
        fileCount += 1
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        # Only regular allocated files above 1000 bytes are carved.
        if ((file.getSize() > 1000) and (file.getType() != TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) and (file.getType() != TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) and (file.isFile() != False)):
            self.log(Level.INFO, "Processing file: " + file.getName())

            # NOTE(review): fileGetName is computed (":" sanitized) but never
            # used afterwards; output directories are keyed by file id instead.
            fileGetName=file.getName()
            if ":" in fileGetName:
                fileGetName=fileGetName.replace(":","_")
            else:
                fileGetName=file.getName()

            # Per-file output directory named by the file id.
            if PlatformUtil.isWindowsOS():
                out_dir = os.path.join(Temp_Dir + "\Carved-Foremost", str(file.getId()))
                try:
                    os.mkdir(Temp_Dir + "\Carved-Foremost\\" + str(file.getId()))
                except:
                    self.log(Level.INFO, str(file.getId()) + " Directory already exists " + Temp_Dir)
            else:
                out_dir = os.path.join(Temp_Dir + "/Carved-Foremost", str(file.getId()))
                try:
                    os.mkdir(Temp_Dir + "/Carved-Foremost/" + str(file.getId()))
                except:
                    self.log(Level.INFO, str(file.getId()) + " Directory already exists " + Temp_Dir)

            # Extract the file content to temp; best-effort (failures skipped).
            lclDbPath=os.path.join(tmp_dir, str(file.getId()))
            try:
                ContentUtils.writeToFile(file, File(lclDbPath))
            except:
                pass

            # Check if output directory exists and if it does then delete it, this may happen with a rerun
            if os.path.exists(out_dir):
                shutil.rmtree(out_dir)

            if os.path.exists(lclDbPath):
                # NOTE(review): the type list is passed as one fused argument
                # "-tjpeg,png,bmp,gif" (no space) — verify foremost accepts the
                # fused short-option form on the deployed version.
                self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_foremost + " -t " + "jpeg,png,bmp,gif" + " -o " + out_dir + " -i " + lclDbPath)
                pipe = Popen([self.path_to_exe_foremost, "-t" + "jpeg,png,bmp,gif", "-o", out_dir, "-i", lclDbPath], stdout=PIPE, stderr=PIPE)
                out_text = pipe.communicate()[0]
                self.log(Level.INFO, "Output from run is ==> " + out_text)

                # A single entry in out_dir means foremost produced only its
                # audit file, i.e. nothing was carved: discard and move on.
                if len(os.listdir(out_dir)) == 1:
                    shutil.rmtree(out_dir)
                    os.remove(lclDbPath)
                else:
                    # Flag the source file as interesting.
                    art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
                    att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, CarverFilesIngestModuleFactory.moduleName, "New Carved Data and Sqlite Files")
                    art.addAttribute(att)
                    try:
                        # index the artifact for keyword search
                        skCase.getBlackboard().postArtifact(art, moduleName)
                    except Blackboard.BlackboardException as e:
                        self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

                    # Flatten foremost's per-type subdirectories into out_dir
                    # and drop its audit.txt.
                    redactresults = out_dir
                    auditLog = os.path.join(redactresults,"audit.txt")
                    if os.path.exists(auditLog):
                        os.remove(auditLog)
                    imagedirs = os.listdir(redactresults)
                    for imagedir in imagedirs:
                        jpgpath=os.path.join(redactresults,imagedir)
                        imagejpgs=os.listdir(jpgpath)
                        for imagejpg in imagejpgs:
                            srcfile=os.path.join(jpgpath,imagejpg)
                            dstfile=os.path.join(redactresults,imagejpg)
                            shutil.move(srcfile,dstfile)
                        shutil.rmtree(jpgpath)

                    # Register every carved file as a derived file of the source.
                    extractedfiles = next(os.walk(out_dir))[2]
                    for extractfile in extractedfiles:
                        FileExtractCount=FileExtractCount+1
                        self.log(Level.INFO, " File Name is ==> " + extractfile)
                        if PlatformUtil.isWindowsOS():
                            relativeModulepath=Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + "\Carved-Foremost"
                        else:
                            relativeModulepath=Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + "/Carved-Foremost"
                        relativeCarvedpath=os.path.join(relativeModulepath, str(file.getId()))
                        relativelocal_file = os.path.join(relativeCarvedpath, extractfile)
                        local_file = os.path.join(out_dir,extractfile)
                        self.log(Level.INFO, " Local File Name is ==> " + local_file)
                        derived_file=skCase.addDerivedFile(extractfile, relativelocal_file, os.path.getsize(local_file), 0, 0, 0, 0, True, file, "", "foremost", "1.5", "", TskData.EncodingType.NONE)
                        IngestServices.getInstance().fireModuleContentEvent(ModuleContentEvent(derived_file))
                    os.remove(lclDbPath)

        # Update the progress bar
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "File Carver Module", "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    message2 = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "File Carver Module", "Found %d images in %d files " % (FileExtractCount,fileCount))
    IngestServices.getInstance().postMessage(message2)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse Windows Prefetch (*.pf) files via an external helper EXE.

    Registers the TSK_PREFETCH artifact type and its attributes, extracts
    every *.pf file to a temp directory, runs self.path_to_exe to produce a
    SQLite summary database, and turns each database row into a TSK_PREFETCH
    artifact on the matching file.
    """
    # Check to see if the artifacts exist and if not then create it, also check to see if the attributes
    # exist and if not then create them
    skCase = Case.getCurrentCase().getSleuthkitCase();
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_pf = skCase.addArtifactType( "TSK_PREFETCH", "Windows Prefetch")
    except:
        # Already registered on a previous run; fall back to a lookup.
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH")

    # Create the attribute types; duplicates raise and are logged + ignored.
    try:
        attID_pf_fn = skCase.addArtifactAttributeType("TSK_PREFETCH_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Prefetch File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Prefetch File Name. ==> ")
    try:
        attID_pf_an = skCase.addArtifactAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Actual File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Actual File Name. ==> ")
    try:
        attID_nr = skCase.addArtifactAttributeType("TSK_PF_RUN_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Program Number Runs")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Program Number Runs. ==> ")
    try:
        attID_ex1 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_1", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 1")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 1. ==> ")
    try:
        attID_ex2 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_2", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 2")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 2. ==> ")
    try:
        attID_ex3 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_3", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 3")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 3. ==> ")
    try:
        attID_ex4 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_4", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 4")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 4 ==> ")
    try:
        attID_ex5 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_5", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 5")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 5. ==> ")
    try:
        attID_ex6 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_6", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 6")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 6. ==> ")
    try:
        attID_ex7 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_7", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 7")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 7. ==> ")
    try:
        attID_ex8 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_8", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 8")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 8 ==> ")

    self.log(Level.INFO, "Get Artifacts after they were created.")
    # Get the new artifacts and attributes that were just created
    artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH")
    artID_pf_evt = skCase.getArtifactType("TSK_PREFETCH")
    attID_pf_fn = skCase.getAttributeType("TSK_PREFETCH_FILE_NAME")
    attID_pf_an = skCase.getAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME")
    attID_nr = skCase.getAttributeType("TSK_PF_RUN_COUNT")
    attID_ex1 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_1")
    attID_ex2 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_2")
    attID_ex3 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_3")
    attID_ex4 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_4")
    attID_ex5 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_5")
    attID_ex6 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_6")
    attID_ex7 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_7")
    attID_ex8 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_8")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Find the prefetch files and the layout.ini file from the /windows/prefetch folder
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.pf")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create prefetch directory in temp directory, if it exists then continue on processing
    Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "Prefetch_Files")
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir)
    except:
        self.log(Level.INFO, "Prefetch Directory already exists " + Temp_Dir)

    # Write out each prefetch file to the temp directory
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(Temp_Dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the EXE once over the whole directory, saving output to a sqlite database
    self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + " Parm 2 ==> " + Case.getCurrentCase().getTempDirectory())
    subprocess.Popen([self.path_to_exe, Temp_Dir, os.path.join(Temp_Dir, "Autopsy_PF_DB.db3")]).communicate()[0]

    # Set the database to be read to the one created by the prefetch parser program
    lclDbPath = os.path.join(Temp_Dir, "Autopsy_PF_DB.db3")
    self.log(Level.INFO, "Path the prefetch database file created ==> " + lclDbPath)

    # Open the DB using JDBC
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the contacts table in the database and get all columns.
    try:
        stmt = dbConn.createStatement()
        resultSet = stmt.executeQuery("Select prefetch_File_Name, actual_File_Name, Number_time_file_run, " +
                                      " Embeded_date_Time_Unix_1, " +
                                      " Embeded_date_Time_Unix_2, " +
                                      " Embeded_date_Time_Unix_3, " +
                                      " Embeded_date_Time_Unix_4, " +
                                      " Embeded_date_Time_Unix_5, " +
                                      " Embeded_date_Time_Unix_6, " +
                                      " Embeded_date_Time_Unix_7, " +
                                      " Embeded_date_Time_Unix_8 " +
                                      " from prefetch_file_info ")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Cycle through each row and create artifacts
    while resultSet.next():
        try:
            self.log(Level.INFO, "Result (" + resultSet.getString("Prefetch_File_Name") + ")")
            Prefetch_File_Name = resultSet.getString("Prefetch_File_Name")
            Actual_File_Name = resultSet.getString("Actual_File_Name")
            Number_Of_Runs = resultSet.getString("Number_Time_File_Run")
            Time_1 = resultSet.getInt("Embeded_date_Time_Unix_1")
            Time_2 = resultSet.getInt("Embeded_date_Time_Unix_2")
            Time_3 = resultSet.getInt("Embeded_date_Time_Unix_3")
            Time_4 = resultSet.getInt("Embeded_date_Time_Unix_4")
            Time_5 = resultSet.getInt("Embeded_date_Time_Unix_5")
            Time_6 = resultSet.getInt("Embeded_date_Time_Unix_6")
            Time_7 = resultSet.getInt("Embeded_date_Time_Unix_7")
            Time_8 = resultSet.getInt("Embeded_date_Time_Unix_8")
        except SQLException as e:
            self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

        # NOTE(review): this reassigns the outer `files` variable, so the
        # cleanup loop below iterates the files of the LAST prefetch name
        # looked up here, not the original *.pf list — looks like a latent bug.
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, Prefetch_File_Name)

        for file in files:
            # Make artifact for TSK_PREFETCH, this can happen when custom attributes are fully supported
            #art = file.newArtifact(artID_pf)
            art = file.newArtifact(artID_pf)
            #self.log(Level.INFO, "Attribute Number ==>" + str(attID_pf_fn) + " " + str(attID_pf_an) )
            # Add the attributes to the artifact.
            art.addAttributes(((BlackboardAttribute(attID_pf_fn, ParsePrefetchDbIngestModuleFactory.moduleName, Prefetch_File_Name)), \
                               (BlackboardAttribute(attID_pf_an, ParsePrefetchDbIngestModuleFactory.moduleName, Actual_File_Name)), \
                               (BlackboardAttribute(attID_nr, ParsePrefetchDbIngestModuleFactory.moduleName, Number_Of_Runs)), \
                               (BlackboardAttribute(attID_ex1, ParsePrefetchDbIngestModuleFactory.moduleName, Time_1)), \
                               (BlackboardAttribute(attID_ex2, ParsePrefetchDbIngestModuleFactory.moduleName, Time_2)), \
                               (BlackboardAttribute(attID_ex3, ParsePrefetchDbIngestModuleFactory.moduleName, Time_3)), \
                               (BlackboardAttribute(attID_ex4, ParsePrefetchDbIngestModuleFactory.moduleName, Time_4)), \
                               (BlackboardAttribute(attID_ex5, ParsePrefetchDbIngestModuleFactory.moduleName, Time_5)), \
                               (BlackboardAttribute(attID_ex6, ParsePrefetchDbIngestModuleFactory.moduleName, Time_6)), \
                               (BlackboardAttribute(attID_ex7, ParsePrefetchDbIngestModuleFactory.moduleName, Time_7)), \
                               (BlackboardAttribute(attID_ex8, ParsePrefetchDbIngestModuleFactory.moduleName, Time_8))))

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName, artID_pf_evt, None))

    # Clean up
    stmt.close()
    dbConn.close()
    os.remove(lclDbPath)

    # Clean up prefetch directory and files
    for file in files:
        try:
            os.remove(os.path.join(Temp_Dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of prefetch file failed " + Temp_Dir + "\\" + file.getName())
    try:
        os.rmdir(Temp_Dir)
    except:
        self.log(Level.INFO, "removal of prefetch directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Prefetch Analyzer", " Prefetch Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName, artID_pf_evt, None))
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Locate every contacts.db in the data source and post one TSK_CONTACT
    artifact (name / email / phone) per row of its "contacts" table.

    Each database is copied to the case temp directory, opened via the
    SQLite JDBC driver, read, then closed and deleted.  Returns
    IngestModule.ProcessResult.OK, including on user cancellation; a
    database that cannot be opened or queried also ends the run with OK.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find files named contacts.db, regardless of parent path
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "contacts.db")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder; use file id as name to
        # reduce collisions
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                 str(file.getId()) + ".db")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO,
                "Could not open database file (not SQLite) " +
                file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("SELECT * FROM contacts")
        except SQLException as e:
            self.log(
                Level.INFO,
                "Error querying database for contacts table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                name = resultSet.getString("name")
                email = resultSet.getString("email")
                phone = resultSet.getString("phone")
            except SQLException as e:
                self.log(
                    Level.INFO,
                    "Error getting values from contacts table (" +
                    e.getMessage() + ")")
                # BUG FIX: previously execution fell through here with
                # name/email/phone unbound (NameError on the first row) or
                # stale from the prior row (wrong artifact data).  Skip the
                # unreadable row instead.
                continue

            # Make an artifact on the blackboard, TSK_CONTACT and give it
            # attributes for each of the fields
            art = file.newArtifact(
                BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.
                    getTypeID(), ContactsDbIngestModuleFactory.moduleName,
                    name))
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(
                    ), ContactsDbIngestModuleFactory.moduleName, email))
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.
                    getTypeID(), ContactsDbIngestModuleFactory.moduleName,
                    phone))

            try:
                # index the artifact for keyword search
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE,
                         "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there are new
        # artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ContactsDbIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT,
                            None))

        # Clean up the JDBC resources and the temporary database copy
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "ContactsDb Analyzer",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract thumbnails from Windows thumbcache_*.db files.

    Copies each thumbcache database to temp, runs the external
    thumbcache_viewer_cmd.exe to dump its images into a per-file
    ModuleOutput directory, and registers each dumped image as a derived
    file of the original thumbcache database.

    NOTE(review): path separators are hard-coded Windows style ("\\"), so
    this module appears Windows-only — confirm before running elsewhere.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    thumb_files = fileManager.findFiles(dataSource, "thumbcache_%.db", "")
    numFiles = len(thumb_files)
    self.log(Level.INFO, "Number of Thumbs.db files found ==> " + str(numFiles))

    # Create output directory under ModuleOutput; if it exists continue on.
    Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    tmp_dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir + "\Thumbcache")
    except:
        self.log(Level.INFO, "Thumbcache directory already exists " + Temp_Dir)

    for thumb_file in thumb_files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "Processing file: " + thumb_file.getName())
        #fileCount += 1

        # Per-database output directory keyed by "<id>-<name>".
        out_dir = os.path.join(Temp_Dir + "\Thumbcache", str(thumb_file.getId()) + "-" + thumb_file.getName())
        try:
            os.mkdir(Temp_Dir + "\Thumbcache\\" + str(thumb_file.getId()) + "-" + thumb_file.getName())
        except:
            self.log(Level.INFO, str(thumb_file.getId()) + "-" + thumb_file.getName() + " Directory already exists " + Temp_Dir)

        # Save the thumbs.DB locally; use file id as name to reduce collisions
        lclDbPath = os.path.join(tmp_dir, str(thumb_file.getId()) + "-" + thumb_file.getName())
        ContentUtils.writeToFile(thumb_file, File(lclDbPath))

        # Run thumbs_viewer against the selected database; -O sets the output dir.
        self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_thumbs + " -O " + out_dir + " " + lclDbPath)
        pipe = Popen([self.path_to_exe_thumbs, "-O", out_dir, lclDbPath], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Get the parent abstract file information (the thumbcache db itself).
        abstract_file_info = skCase.getAbstractFileById(thumb_file.getId())

        # Register every dumped image as a derived file, unless already known.
        files = next(os.walk(out_dir))[2]
        for file in files:
            self.log(Level.INFO, " File Name is ==> " + file)
            dev_file = os.path.join(out_dir, file)
            local_file = os.path.join("ModuleOutput\\thumbcache\\" + str(thumb_file.getId()) + "-" + thumb_file.getName(), file)
            self.log(Level.INFO, " Dev File Name is ==> " + dev_file)
            self.log(Level.INFO, " Local File Name is ==> " + local_file)
            if not(self.check_dervived_existance(dataSource, file, abstract_file_info)):
                # Add derived file.  Parameters are:
                #   File Name, Local Path, size, ctime, crtime, atime, mtime,
                #   isFile, Parent File, rederive Details, Tool Name,
                #   Tool Version, Other Details, Encoding Type
                # NOTE(review): the stray "+" before the continuation is a
                # unary plus on the ctime argument (i.e. +0) — harmless but odd.
                dervived_file = skCase.addDerivedFile(file, local_file, os.path.getsize(dev_file), + \
                    0, 0, 0, 0, True, abstract_file_info, "", "thumbcache_viewer_cmd.exe", "1.0.3.4", "", TskData.EncodingType.NONE)
                #self.log(Level.INFO, "Derived File ==> " + str(dervived_file))
            else:
                pass

        # Remove the temporary database copy for this thumbcache file.
        try:
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "removal of thumbcache file " + lclDbPath + " failed " )

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Thumbcache", " Thumbcache Files Have Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse IE/Edge WebcacheV01.dat files via an external converter EXE.

    Extracts each WebcacheV01.dat, converts it to a SQLite database with
    self.path_to_exe, then for every container table dynamically creates a
    matching artifact type ("TSK_WC_<container>") plus one attribute per
    column, and posts an artifact per row.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "WebcacheV01.dat")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create working directory in temp; if it exists then continue on.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "Webcache")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Webcache Directory already exists " + temp_dir)

    # Pass 1: extract each webcache file and convert it to SQLite.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1

        # Save the DB locally in the temp folder; "<name>-<id>" avoids collisions.
        lclDbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()))
        DbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, file.getName() + ' ==> ' + str(file.getId()) + ' ==> ' + file.getUniquePath())
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE, saving output to a sqlite database
        self.log(Level.INFO, "Running program on data source parm 1 ==> " + temp_dir + " Parm 2 ==> " + DbPath)
        #subprocess.Popen([self.path_to_exe, lclDbPath, DbPath]).communicate()[0]
        pipe = Popen([self.path_to_exe, lclDbPath, DbPath], stdout=PIPE, stderr=PIPE, cwd=os.path.dirname(os.path.abspath(__file__)))
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Pass 2: open each converted SQLite database and post artifacts.
    for file in files:
        lclDbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, "Path the Webcache database file created ==> " + lclDbPath)

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        #PSlsit => TSK_PROG_RUN
        # Get the distinct container names (History, Cookies, Content, ...).
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select distinct container_name from all_containers;")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        Container_List = []
        while resultSet.next():
            Container_List.append(resultSet.getString("container_name"))

        # Cycle through each container and create artifacts per row.
        for c_name in Container_List:
            try:
                container_name = c_name
                # NOTE(review): container_name comes from the database being
                # examined and is concatenated straight into SQL — injection
                # style.  It only targets this local work copy, but a
                # parameterized query/PreparedStatement would be safer.
                SQL_String_1 = "Select * from all_containers where container_name = '" + container_name + "';"
                SQL_String_2 = "PRAGMA table_info('All_Containers')"
                artifact_name = "TSK_WC_" + container_name.upper()
                artifact_desc = "WebcacheV01 " + container_name.upper()
                try:
                    self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_web = skCase.addArtifactType( artifact_name, artifact_desc)
                except:
                    # Already registered on a prior run; fall back to lookup below.
                    self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                artID_web = skCase.getArtifactTypeID(artifact_name)
                artID_web_evt = skCase.getArtifactType(artifact_name)

                # Discover the column layout and register one attribute
                # per column: TEXT/unknown -> STRING, everything else -> LONG.
                Column_Names = []
                Column_Types = []
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type").upper())
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    elif resultSet2.getString("type").upper() == "":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One artifact per row of this container.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_web)
                    Column_Number = 1
                    for col_name in Column_Names:
                        c_name = "TSK_" + col_name
                        # NOTE(review): the first assignment is immediately
                        # overwritten by the second — looks redundant.
                        attID_ex1 = skCase.getAttrTypeID(c_name)
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        elif Column_Types[Column_Number - 1] == "":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        # elif Column_Types[Column_Number - 1] == "BLOB":
                        #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, "BLOBS Not Supported"))
                        # elif Column_Types[Column_Number - 1] == "REAL":
                        #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseSRUDBIngestModuleFactory.moduleName, resultSet3.getFloat(Column_Number)))
                        else:
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                        Column_Number = Column_Number + 1

                # Fire an event to notify the UI and others of the new artifacts.
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(ParseWebcacheIngestModuleFactory.moduleName, artID_web_evt, None))
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

    # Clean up
    #stmt.close()
    #dbConn.close()
    #os.remove(lclDbPath)

    # Clean up the Webcache directory and files (raw extract + converted db).
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName() + "-" + str(file.getId())))
            os.remove(os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3"))
        except:
            self.log(Level.INFO, "removal of Webcache file failed " + temp_dir + "\\" + file.getName() + "-" + str(file.getId()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Webcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Webcache Parser", " Webcache Has Been Parsed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Locate LevelDB manifest files, extract each LevelDB directory to temp,
    convert it with an external executable, and post one TSK_LEVELDB artifact
    per row of the resulting CSV.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: DataSourceIngestModuleProgress used for UI feedback.

    Returns:
        IngestModule.ProcessResult.OK (also returned early on job cancel).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the store.vol abstract file information
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # LevelDB databases are identified by their "manifest-*" files.
    files = fileManager.findFiles(dataSource, "manifest-%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    artifactId = 0

    # Create the custom artifact type; if it already exists addArtifactType
    # throws, so the except path just looks the ID up instead.
    try:
        artId = skCase.addArtifactType("TSK_LEVELDB", "LevelDb Database(s)")
        artifactId = skCase.getArtifactTypeID("TSK_LEVELDB")
    except:
        artifactId = skCase.getArtifactTypeID("TSK_LEVELDB")
        self.log(Level.INFO, "Artifacts Creation Error for artifact ==> TSK_LEVELDB")

    # Create Event Log directory in temp directory, if it exists then continue on processing
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "LevelDb")
    #self.log(Level.INFO, "create Directory " + moduleDirectory)
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass
        #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

    # Write out each users store.vol file and process it.
    for file in files:
        # Skip slack-space pseudo files produced by Autopsy.
        if "-slack" not in file.getName():
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            self.log(Level.INFO, "Processing Path: " + file.getParentPath())
            fileCount += 1
            # One temp subdirectory per manifest file, keyed by object id to
            # avoid collisions between identically-named LevelDBs.
            manifestDirectory = os.path.join(temporaryDirectory, str(file.getId()))
            try:
                os.mkdir(manifestDirectory)
            except:
                self.log(Level.INFO, "Temporary directory already exists " + manifestDirectory)
            # Pull every sibling file (ldb/log/CURRENT/...) of the manifest so
            # the external tool sees a complete LevelDB directory.
            levelDbFiles = fileManager.findFilesByParentPath(dataSource.getId(), file.getParentPath())
            levelDbFileNum = len(levelDbFiles)
            self.log(Level.INFO, "found " + str(levelDbFileNum) + " files")
            for levelDbFile in levelDbFiles:
                # Save the file locally.  Use file id as name to reduce collisions
                self.log(Level.INFO, "Copying file " + levelDbFile.getName() + " to temp")
                if levelDbFile.getName() == "." or levelDbFile.getName() == ".." or "-slack" in levelDbFile.getName():
                    self.log(Level.INFO, "Not a valid file to copy")
                else:
                    extractedLevelDbFile = os.path.join(manifestDirectory, levelDbFile.getName())
                    ContentUtils.writeToFile(levelDbFile, File(extractedLevelDbFile))
            #os.remove(extractedFile)

            # sqliteDbFile is computed but only the CSV output path is passed
            # to the converter in the current invocation (see commented Popen).
            sqliteDbFile = os.path.join(temporaryDirectory, str(file.getId()) + ".db3")
            csvOutFile = os.path.join(temporaryDirectory, str(file.getId()))
            self.log(Level.INFO, str(self.pathToExe) + " " + str(manifestDirectory) + " " + str(csvOutFile))
            # self.log(Level.INFO, str(self.pathToExe) + " " + str(manifestDirectory) + " " + str(sqliteDbFile) + " " + str(csvOutFile))
            # pipe = Popen([self.pathToExe, manifestDirectory, sqliteDbFile + ".db3", csvOutFile + ".csv"], stdout=PIPE, stderr=PIPE)
            # Run the external LevelDB-to-CSV converter; the tool is expected
            # to append ".csv" to the output path itself.
            pipe = Popen([self.pathToExe, manifestDirectory, csvOutFile], stdout=PIPE, stderr=PIPE)
            outputFromRun = pipe.communicate()[0]
            self.log(Level.INFO, "Output from Run is ==> " + outputFromRun)

            # Each CSV row becomes one artifact: col 0 -> TSK_NAME, col 1 -> TSK_VALUE.
            attribute_names = ["TSK_NAME", "TSK_VALUE"]
            with open(csvOutFile + ".csv", 'rU') as csvfile:
                csvreader = csv.reader(csvfile, delimiter=',', quotechar='|')
                for row in csvreader:
                    art = file.newArtifact(artifactId)
                    for (data, head) in zip(row, attribute_names):
                        art.addAttribute(BlackboardAttribute(skCase.getAttributeType(head), LeveldbParserIngestModuleFactory.moduleName, data))
                        #self.log(Level.INFO, "artifact_created. ==> " + "TSK_CSV_" + col_name.upper() + " =====> " + data)
                    # Record where the LevelDB lived in the image.
                    art.addAttribute(BlackboardAttribute(skCase.getAttributeType("TSK_PATH"), LeveldbParserIngestModuleFactory.moduleName, file.getParentPath()))
            #self.processFbChat(databaseFile)
            #self.processChats(skCase, file)
            #os.remove(extractedFile)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "LevelDb Parser", " LevelDb's Have Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def parse_sqlite_data(self, dataSource, progressBar, os_version, mac_os_art_id, settings_db):
    """Drive one macOS-artifact extraction described by a row of the module's
    own settings database (settings_db).

    The settings DB tells us which file to find in the data source, which SQL
    to run against it, and which Autopsy artifact/attribute types to create.
    Matching files are copied to <temp>/macos_recent, opened over JDBC, and
    each result row is posted as one artifact.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: DataSourceIngestModuleProgress for UI feedback.
        os_version: macOS version string used to select settings rows.
        mac_os_art_id: primary key of the artifact definition to process.
        settings_db: filesystem path to the module's settings SQLite DB.

    Returns:
        IngestModule.ProcessResult.OK on several early-exit error paths;
        falls off the end (None) after normal cleanup.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();

    # Open the module's own settings database (not evidence).
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) macos_recents.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the history_visits table in the database and get all columns.
    # NOTE(review): os_version and mac_os_art_id are concatenated into the SQL;
    # values come from the module itself, but parameterized queries would be safer.
    try:
        stmt = dbConn.createStatement()
        macos_version_sql = "select mac_osx_art_id, mac_osx_art_type, mac_osx_art_File_Name, mac_osx_art_dir_name, " + \
                            " mac_osx_art_database_name, mac_osx_art_sql_statement, os_version, " + \
                            " os_name from mac_artifact a, os_version b where a.os_id = b.os_id and b.os_version = '" + os_version + "'" + \
                            " and mac_osx_art_id = " + str(mac_os_art_id) + ";"
        self.log(Level.INFO, macos_version_sql)
        resultSet = stmt.executeQuery(macos_version_sql)
        self.log(Level.INFO, "query recent version table")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for recent version (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Get the artifact name and create it.
    try:
        stmt_2 = dbConn.createStatement()
        artifact_sql = "select distinct autopsy_art_type, autopsy_art_name, autopsy_art_description " + \
                       " from autopsy_artifact a, Art_att_mac_xref b where a.autopsy_art_id = b.autopsy_art_id " + \
                       " and b.mac_osx_art_id = " + resultSet.getString("mac_osx_art_id") + ";"
        resultSet_art = stmt_2.executeQuery(artifact_sql)
        self.log(Level.INFO, "Artifact Type (" + resultSet_art.getString("autopsy_art_type") + ")")
        # Non-'AUTOPSY' rows are custom artifact types that must be registered first.
        if resultSet_art.getString("autopsy_art_type") != 'AUTOPSY':
            try:
                self.log(Level.INFO, "Begin Create New Artifacts ==> " + resultSet_art.getString("autopsy_art_name"))
                # NOTE(review): "autopsy_art_desctiption" looks like a typo for
                # "autopsy_art_description" (the column actually selected above);
                # getString would throw here and the bare except masks it — the
                # description may never be applied. TODO confirm against the
                # settings DB schema before changing.
                artID_art = skCase.addArtifactType( resultSet_art.getString("autopsy_art_name"), \
                                                    resultSet_art.getString("autopsy_art_desctiption"))
                self.artifact_name = resultSet_art.getString("autopsy_art_name")
            except:
                self.log(Level.INFO, "Artifacts Creation Error, artifact " + resultSet_art.getString("autopsy_art_name") + " exists. ==> ")
        else:
            self.artifact_name = resultSet_art.getString("autopsy_art_name")

        # Get the attribute types and create them
        stmt_3 = dbConn.createStatement()
        attribute_sql = "select distinct autopsy_attrib_type, autopsy_attrib_name, autopsy_attrib_desc, autopsy_attrib_value_type_desc " + \
                        " from autopsy_attribute a, Art_att_mac_xref b, autopsy_value_type c " + \
                        " where a.autopsy_attrib_id = b.autopsy_attrib_id and a.autopsy_attrib_value_type = c.autopsy_attrib_value_type " + \
                        " and b.mac_osx_art_id =" + resultSet.getString("mac_osx_art_id") + ";"
        self.log(Level.INFO, "Attribute SQL ==> " + attribute_sql)
        resultSet_att = stmt_3.executeQuery(attribute_sql)
        # Register each CUSTOM attribute, mapping the textual value-type to the
        # Blackboard enum; addArtifactAttributeType throws if it already exists,
        # hence the per-branch try/except that just logs.
        while resultSet_att.next():
            if resultSet_att.getString("autopsy_attrib_type") == 'CUSTOM':
                if resultSet_att.getString("autopsy_attrib_value_type_desc") == 'String':
                    try:
                        attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet_att.getString("autopsy_attrib_desc"))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
                elif resultSet_att.getString("autopsy_attrib_value_type_desc") == 'Integer':
                    try:
                        attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, resultSet_att.getString("autopsy_attrib_desc"))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
                elif resultSet_att.getString("autopsy_attrib_value_type_desc") == 'Long':
                    try:
                        attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet_att.getString("autopsy_attrib_desc"))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
                elif resultSet_att.getString("autopsy_attrib_value_type_desc") == 'Double':
                    try:
                        attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE, resultSet_att.getString("autopsy_attrib_desc"))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
                elif resultSet_att.getString("autopsy_attrib_value_type_desc") == 'Byte':
                    try:
                        attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE, resultSet_att.getString("autopsy_attrib_desc"))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
                else:
                    # Fallback: anything else is registered as DATETIME.
                    try:
                        attID_vss_num = skCase.addArtifactAttributeType(resultSet_att.getString("autopsy_attrib_name"), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, resultSet_att.getString("autopsy_attrib_desc"))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error for ," + resultSet_att.getString("autopsy_attrib_name") + " ==> ")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for artifacts/attributes (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Cycle through each row and create artifacts
    while resultSet.next():
        # Set the database to be read to the once created by the prefetch parser program
        macos_file_name = resultSet.getString("mac_osx_art_File_Name")
        macos_dir_name = resultSet.getString("mac_osx_art_dir_name")
        macos_database_name = resultSet.getString("mac_osx_art_database_name")
        #macos_table_name = resultSet.getString("mac_osx_art_table_name")
        #self.path_to_plist_exe = os.path.join(os.path.dirname(os.path.abspath(__file__)), resultSet.getString("mac_osx_art_exec_file"))
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, macos_file_name + "%", macos_dir_name)
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;
        all_files = []
        # do this since we want to get the wal or journal files associated with the SQLite database but we want to
        # make sure we have them to use
        # NOTE(review): files_to_process is only assigned when numFiles > 1; a
        # single-file match would leave it unbound (or stale from a previous
        # iteration) for the loop below — TODO confirm intended behavior.
        if numFiles > 1:
            for file in files:
                if file.getName() == macos_file_name:
                    self.log(Level.INFO, file.getParentPath())
                    all_files.append(file)
            files_to_process = all_files
        # Create Event Log directory in temp directory, if it exists then continue on processing
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "\macos_recent")
        except:
            self.log(Level.INFO, "macos_recent Directory already exists " + Temp_Dir)

        # Write out each Event Log file to the temp directory
        file_id = 0
        for file in files:
            #self.log(Level.INFO, str(file))
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions, also add file id to wal and journal files
            # if needed so that it can use the journals.
            self.log(Level.INFO, "File Name ==> " + file.getName() + " <==> " + macos_database_name)
            if file.getName().upper() == macos_database_name.upper():
                file_id = file.getId()
                self.log(Level.INFO, "File Name ==> " + file.getName() + " <==> " + macos_database_name + " <++> " + str(file.getId()))
                lclDbPath = os.path.join(Temp_Dir + "\macos_recent", str(file_id) + "-" + file.getName())
                self.log(Level.INFO, " Database name ==> " + lclDbPath)
                ContentUtils.writeToFile(file, File(lclDbPath))
            else:
                # WAL/journal siblings get the main DB's file_id prefix so
                # SQLite associates them with the extracted database.
                lclDbPath = os.path.join(Temp_Dir + "\macos_recent", str(file_id) + "-" + file.getName())
                self.log(Level.INFO, " Database name ==> " + lclDbPath)
                ContentUtils.writeToFile(file, File(lclDbPath))

        lclDbPath = os.path.join(Temp_Dir + "\macos_recent", str(file_id) + "-" + macos_database_name)
        lclFilePath = os.path.join(Temp_Dir + "\macos_recent", macos_file_name)
        self.log(Level.INFO, " Database name ==> " + lclDbPath + " File Path ==> " + lclFilePath)

        for file in files_to_process:
            # Example has only a Windows EXE, so bail if we aren't on Windows
            if not PlatformUtil.isWindowsOS():
                self.log(Level.INFO, "Ignoring data source.  Not running on Windows")
                return IngestModule.ProcessResult.OK

            # Open the DB using JDBC
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory() + "\macos_recent", str(file.getId()) + "-" + macos_database_name)
            self.log(Level.INFO, "Path the Safari History.db database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Query the history_visits table in the database and get all columns.
            # The actual per-artifact SQL comes from the settings DB row.
            try:
                stmt_1 = dbConn.createStatement()
                macos_recent_sql = resultSet.getString("mac_osx_art_sql_statement")
                self.log(Level.INFO, macos_recent_sql)
                resultSet_3 = stmt_1.executeQuery(macos_recent_sql)
                self.log(Level.INFO, "query " + macos_database_name + " table")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for history table (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            artID_hst = skCase.getArtifactTypeID(self.artifact_name)
            artID_hst_evt = skCase.getArtifactType(self.artifact_name)

            # Column labels from the result set double as attribute type names.
            meta = resultSet_3.getMetaData()
            columncount = meta.getColumnCount()
            column_names = []
            self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
            for x in range (1, columncount + 1):
                self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                column_names.append(meta.getColumnLabel(x))

            self.log(Level.INFO, "All Columns ==> " + str(column_names))
            # Cycle through each row and create artifacts
            while resultSet_3.next():
                try:
                    #self.log(Level.INFO, SQL_String_1)
                    self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                    art = file.newArtifact(artID_hst)
                    self.log(Level.INFO, "Inserting attribute URL")
                    for col_name in column_names:
                        attID_ex1 = skCase.getAttributeType(col_name)
                        self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                        self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                        if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                            # NOTE(review): LONG and DOUBLE values are fetched
                            # with getInt — possible truncation; TODO confirm.
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                        elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                        else:
                            try:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseMacOS_RecentIngestModuleFactory.moduleName, resultSet_3.getReal(col_name)))
                            except:
                                self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from web_history table (" + e.getMessage() + ")")

            # Notify listeners that new artifacts of this type exist.
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(ParseMacOS_RecentIngestModuleFactory.moduleName, artID_hst_evt, None))

            stmt_3.close()
            stmt_2.close()
            stmt_1.close()
            stmt.close()
            dbConn.close()

            # Clean up
            os.remove(lclDbPath)

    #Clean up EventLog directory and files
    for file in files:
        try:
            os.remove(Temp_Dir + "\\macos_recent\\" + file.getName())
        except:
            self.log(Level.INFO, "removal of Safari History file failed " + Temp_Dir + "\\macos_recent" + file.getName())
    try:
        os.rmdir(Temp_Dir + "\\macos_recent")
    except:
        self.log(Level.INFO, "removal of Safari History directory failed " + Temp_Dir)
def process(self, dataSource, progressBar):
    """Find every contacts.db in the data source and post one TSK_CONTACT
    artifact (name, email, phone) per row of its 'contacts' table.

    Each database is copied to the case temp directory, opened over the
    SQLite JDBC driver, ingested, then deleted.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: DataSourceIngestModuleProgress for UI feedback.

    Returns:
        IngestModule.ProcessResult.OK (also returned early on cancel or on
        a database open/query failure).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # This will work in 4.0.1 and beyond
    # Use blackboard class to index blackboard artifacts for keyword search
    # blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find files named contacts.db, regardless of parent path
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "contacts.db")

    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".db")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("SELECT * FROM contacts")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for contacts table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                name = resultSet.getString("name")
                email = resultSet.getString("email")
                phone = resultSet.getString("phone")
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
                # Skip this row: falling through would reference name/email/
                # phone, which may be unbound (NameError) or stale from the
                # previous row when the read above failed.
                continue

            # Make an artifact on the blackboard, TSK_CONTACT and give it attributes for each of the fields
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(), ContactsDbIngestModuleFactory.moduleName, name))
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(), ContactsDbIngestModuleFactory.moduleName, email))
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(), ContactsDbIngestModuleFactory.moduleName, phone))

            # This will work in 4.0.1 and beyond
            #try:
            #    # index the artifact for keyword search
            #    blackboard.indexArtifact(art)
            #except Blackboard.BlackboardException as e:
            #    self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ContactsDbIngestModuleFactory.moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, None))

        # Clean up
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "ContactsDb Analyzer", "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract the Windows StateRepository-Machine.srd database and post one
    TSK_INSTALLED_PROG artifact per installed Appx package found in it.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: DataSourceIngestModuleProgress for UI feedback.

    Returns:
        IngestModule.ProcessResult.OK (also returned early on cancel or on
        database open/query failure).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the store.vol abstract file information
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "staterepository-machine%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Event Log directory in temp directory, if it exists then continue on processing
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Appx_Programs")
    #self.log(Level.INFO, "create Directory " + moduleDirectory)
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass
        #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

    # Write out each users store.vol file and process it.
    # First pass: extract every matching file (including .srd-wal/-shm
    # siblings matched by the % wildcard) so SQLite can open a consistent DB.
    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the file locally.  Use file id as name to reduce collisions
        extractedFile = os.path.join(temporaryDirectory, file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        #os.remove(extractedFile)

    # Second pass: only the main .srd database is queried.
    for file in files:
        #os.remove(extractedFile)
        if file.getName().lower() == "staterepository-machine.srd":
            extractedFile = os.path.join(temporaryDirectory, file.getName())
            artIdInsProg = skCase.getArtifactTypeID("TSK_INSTALLED_PROG")
            artIdInsProgType = skCase.getArtifactType("TSK_INSTALLED_PROG")
            moduleName = ProcessAppxProgramsIngestModuleFactory.moduleName
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extractedFile)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedFile + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK
            # Join package/family/location/user tables; '??' columns are
            # placeholders the author never populated. InstallTime converts a
            # Windows FILETIME-style value to Unix epoch.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("select distinct * from (Select packfam.name, packfam.publisher, packfam.publisherid, packuser.user, " + \
                                              " case Architecture when 0 then 'X64' when 9 then 'x86' when 11 then 'Neutral' else Architecture end Architecture, " + \
                                              " pack.ResourceId, " + \
                                              " substr(pack.packageFullName, instr(pack.packageFullName, '_') + 1, instr(substr(pack.packageFullName, instr(pack.packageFullName, '_') + 1), '_') - 1) version, " + \
                                              " packfam.packageFamilyname, pack.packageFullName, '??' isFramework, '??' PackageUserInformaton, " + \
                                              " '??' isResourcePakage, '??' IsBundle, '??' IsDevelopment, '??' Dependicies, '??' IsPartiallyStaged, " + \
                                              " case SignatureOrigin when 3 then 'System' when 2 then 'Store' else 'Unknown' end SignatureKind, packuser.PackageStatus Status, " + \
                                              " (substr(packuser.installTime,1,11) -11644473600) InstallTime, packloc.installedLocation " + \
                                              " from PackageUser packuser, package pack, packageFamily packfam, packageLocation packloc " + \
                                              " where packuser.package = pack._PackageId and pack.packageFamily = packfam._PackagefamilyId " + \
                                              " and packloc.package = pack._packageId and (pack.resourceId is null or pack.resourceId = 'neutral')); ")
                self.log(Level.INFO, "query Appx tables")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for appx tables (" + e.getMessage() + ") ")
                return IngestModule.ProcessResult.OK

            # Cycle through each row and get the installed programs and install time
            while resultSet.next():
                try:
                    artInsProg = file.newArtifact(artIdInsProg)
                    attributes = ArrayList()
                    # NOTE(review): TSK_PROG_NAME is passed as the enum while
                    # TSK_DATETIME uses getTypeID() — inconsistent constructor
                    # overloads; confirm both resolve as intended.
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, moduleName, resultSet.getString("name")))
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), moduleName, resultSet.getInt("InstallTime")))
                    artInsProg.addAttributes(attributes)
                    # index the artifact for keyword search
                    # NOTE(review): 'blackboard' is not assigned anywhere in
                    # this method; the bare except silently swallows the
                    # resulting NameError, so artifacts are never indexed here.
                    try:
                        blackboard.indexArtifact(artInsProg)
                    except:
                        pass
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from Appx tables (" + e.getMessage() + ")")

            # Close the database statement
            try:
                stmt.close()
                dbConn.close()
            except:
                pass

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Appx Installed Programs", " Appx Installed Programs Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Copy image files from the data source into the module output folder
    for later FDRI processing.

    Files matching self.extensions are found case-insensitively, logged with
    their sizes, split by C_FILE_MIN_SIZE into 'img' (large enough to
    process) and 'small_files' directories, and de-duplicated by MD5 into
    files_hash_D. Files whose names start with 'Anotated_'/'Annotated_'
    (output of a previous run) are skipped.

    Args:
        dataSource: the Autopsy data source being ingested.
        progressBar: DataSourceIngestModuleProgress for UI feedback.

    Returns:
        IngestModule.ProcessResult.OK when no usable files exist or the job
        is cancelled; otherwise continues (body runs past this excerpt's
        copy phase).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Start timer for file copy operation
    start_copy_time = time.time()

    # case insensitive SQL LIKE clause is used to query the case database
    # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/4.4.1/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = []
    for extension in self.extensions:
        try:
            files.extend(fileManager.findFiles(dataSource, "%" + extension))
        except TskCoreException:
            self.log(Level.INFO, "Error getting files from: '" + extension + "'")

    numFiles = len(files)
    if not numFiles:
        self.log(Level.WARNING, "Didn't find any usable files!")
        return IngestModule.ProcessResult.OK

    # Check if the user pressed cancel while we were busy
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    output_dir = Case.getCurrentCase().getModuleDirectory()
    module_dir = os.path.join(output_dir, dataSource.getName(), C_FDRI_DIR)

    # Create top-level DIR to save FDIR's created files
    full_dirname_dataSource = os.path.join(output_dir, dataSource.getName())
    if not os.path.exists(full_dirname_dataSource):
        os.mkdir(full_dirname_dataSource)

    # TEMP is needed by Autopsy
    temp_dir = os.path.join(Case.getCurrentCase().getTempDirectory(), dataSource.getName())
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    temp_dir = os.path.join(temp_dir, C_FDRI_DIR)
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)

    # We always copy the files (except if a copy already exists)
    # as we will want to change them.
    # We detect the existence of a previous copy if the creation of the dir
    # 'module_dir' triggers an exception
    try:
        os.mkdir(module_dir)
    except:
        self.log(Level.INFO, "Directory already exists for this module")

    #----------------------------------------
    # Init file which holds filenames + size
    #----------------------------------------
    file_path = os.path.join(module_dir, C_FILE_WITH_FNAMES_AND_SIZES)
    fnames_and_sizes_F = open(file_path, "w")
    fnames_and_sizes_F.write(C_SEP_S)
    fnames_and_sizes_F.write("# Filename:size (bytes)\n")
    timestamp_S = datetime.now().strftime('%Y-%m-%d_%Hh%Mm%Ss')
    fnames_and_sizes_F.write("# START: %s\n" % (timestamp_S))
    fnames_and_sizes_F.write(C_SEP_S)

    # Dict to detect identical files (MD5 -> [size, name, name, ...])
    files_hash_D = {}

    # Flag to record whether files were copied or not
    were_files_copied = True

    # Minimum size (in bytes) for an image file to be processed
    total_files = 0
    total_small_files = 0

    # A initial version mispelled 'Annotated"... so skip both spellings of
    # files produced by earlier runs.
    avoid_prefix_1 = "Anotated_"
    avoid_prefix_2 = "Annotated_"

    try:
        dir_img = os.path.join(module_dir, "img")
        os.mkdir(dir_img)
        dir_small_files = os.path.join(module_dir, "small_files") + "\\"
        os.mkdir(dir_small_files)
        for file in files:
            total_files = total_files + 1
            filename_S = file.getName()
            Log_S = ""
            # FIX: original used "filename_S.find(prefix) is 0" — identity
            # comparison on an int result only works by CPython small-int
            # caching; startswith() expresses the intent directly.
            if filename_S.startswith(avoid_prefix_1):
                Log_S = "%s file found '%s': skipping" %\
                        (avoid_prefix_1, filename_S)
            elif filename_S.startswith(avoid_prefix_2):
                Log_S = "%s file found '%s': skipping" %\
                        (avoid_prefix_2, filename_S)
            if len(Log_S):
                # Annotated_ found
                # Log and skip this file
                self.log(Level.INFO, Log_S)
                continue

            file_size = file.getSize()
            filename, file_extension = os.path.splitext(file.getName())

            # Record filename and file size in C_FILE_WITH_FNAMES_AND_SIZES
            fnames_and_sizes_F.write("%s:%d\n" % (file.getName(), file_size))

            # If file size is more than C_FILE_MIN_SIZE
            # TODO:: User Choice as option
            if file_size >= C_FILE_MIN_SIZE:
                # Suffix the object id so identically-named files don't clash.
                new_fname = "%s__id__%s%s" %\
                            (filename, str(file.getId()), file_extension)
                fullpath_dest = os.path.join(dir_img, new_fname)
                ContentUtils.writeToFile(file, File(fullpath_dest))

            # We copy small files to a different DIR, so that we
            # can look at them, if needed
            if file_size < C_FILE_MIN_SIZE:
                total_small_files = total_small_files + 1
                dest_filename = "%s%s__id__%d%s" %\
                                (dir_small_files, filename, file.getId(), file_extension)
                ContentUtils.writeToFile(file, File(dest_filename))
                Log_S = "Skipping file: %s (%d bytes)" %\
                        (file.getName(), file.getSize())
                # LOG
                self.log(Level.INFO, Log_S)

            #--------------------------------
            # Code to detect repeated files
            # We simply use a dictionary
            # keyed by the MD5 of the file
            # Patricio
            #--------------------------------
            if file_size > 0:
                md5_hash = self.create_hash(file, "md5")
                if md5_hash in files_hash_D:
                    # hash already exists: repetition
                    files_hash_D[md5_hash].append(file.getName())
                else:
                    # hash doesn't yet exist in dictionary: 1st time
                    files_hash_D[md5_hash] = [file_size, file.getName()]
    # FIX: modern "except ... as e" syntax, consistent with the rest of
    # the file (the original used the archaic "except Exception, e").
    except Exception as e:
        were_files_copied = False
        self.log(Level.INFO, "Image folder already exists, skiping file copy")
        self.log(Level.INFO, "Exception: " + str(e))
def process(self, dataSource, progressBar):
    """Extract SAM/SYSTEM registry hives, run the external BAM parser, and
    post the resulting CSV rows as TSK_BAM_KEY blackboard artifacts.

    dataSource/progressBar are supplied by the Autopsy ingest framework.
    Returns IngestModule.ProcessResult.OK in all completion paths.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Registry hives (and their transaction logs) needed by the BAM parser.
    filesToExtract = ("SAM", "SAM.LOG1", "SAM.LOG2", "SYSTEM", "SYSTEM.LOG1", "SYSTEM.LOG2")

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Create a "bam" work area under the case temp directory; if it already
    # exists just continue processing.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "bam")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "bam Directory already exists " + temp_dir)

    # NOTE(review): initialized as a list but later re-bound to an
    # AbstractFile; if no SYSTEM hive is found, newArtifact() below will
    # fail on the empty list — confirm a SYSTEM hive is always present.
    systemAbsFile = []
    for fileName in filesToExtract:
        files = fileManager.findFiles(dataSource, fileName, "Windows/System32/Config")
        numFiles = len(files)
        for file in files:
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Only take hives from the canonical config directory.
            if file.getParentPath() == '/Windows/System32/config/':
                # Copy the hive into the temp folder.
                # NOTE(review): the file id is NOT part of the name here, so
                # same-named hives from multiple volumes would collide —
                # confirm whether id-prefixing was intended.
                lclDbPath = os.path.join(temp_dir, file.getName())
                ContentUtils.writeToFile(file, File(lclDbPath))
                if file.getName() == 'SYSTEM':
                    # Remember the SYSTEM hive; artifacts are attached to it.
                    systemAbsFile = file
            else:
                self.log(Level.INFO, "Skipping File " + file.getName() + " In Path " + file.getParentPath())

    # Run the external EXE; it writes its results to bam.csv in temp_dir.
    self.log(Level.INFO, "Running program on " + self.pathToExe + temp_dir + " " + os.path.join(temp_dir, 'bam.csv'))
    pipe = Popen([self.pathToExe, temp_dir, os.path.join(temp_dir, "bam.csv")], stdout=PIPE, stderr=PIPE)
    outText = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + outText)

    # Register the custom artifact type; addArtifactType raises if it
    # already exists, which is expected on re-runs.
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_ls = skCase.addArtifactType("TSK_BAM_KEY", "BAM Registry Key")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
    artifactName = "TSK_BAM_KEY"
    artIdCsv = skCase.getArtifactTypeID(artifactName)

    # Read the CSV: first row is the header (attribute names), each later
    # row becomes one artifact on the SYSTEM hive file.
    headingRead = False
    attributeNames = []
    with open(os.path.join(temp_dir, 'bam.csv'), 'rU') as csvfile:
        csvreader = csv.reader(csvfile, delimiter=',', quotechar='|')
        for row in csvreader:
            if not headingRead:
                for colName in row:
                    attributeNames.append(colName.upper().strip())
                headingRead = True
            else:
                art = systemAbsFile.newArtifact(artIdCsv)
                for (data, head) in zip(row, attributeNames):
                    # Try as a string attribute first; fall back to int when
                    # the attribute type expects a numeric value.
                    try:
                        art.addAttribute(BlackboardAttribute(skCase.getAttributeType(head), BamKeyIngestModuleFactory.moduleName, data))
                    except:
                        art.addAttribute(BlackboardAttribute(skCase.getAttributeType(head), BamKeyIngestModuleFactory.moduleName, int(data)))

    # Clean up the temp work area (best effort).
    try:
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + temp_dir)

    # Post a completion message to the ingest inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "BamKey", " BamKey Files Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Find TeraCopy main.db files, walk each transfer recorded in the
    `list` table, open the per-transfer history database it references, and
    post one TSK_TERACOPY_DB artifact per transferred file.

    Returns IngestModule.ProcessResult.OK on success and also on several
    early-exit error paths (unopenable DB, failed query, cancellation).
    """
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "main.db", "%TeraCopy%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    moduleName = ProcessTeraCopyDbIngestModuleFactory.moduleName

    # Create a TeraCopy work area in the case temp directory; ignore the
    # failure when it already exists.
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "TeraCopy")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # Map parent path -> file id so the extracted copy can be id-prefixed
    # (reduces name collisions across users/volumes).
    filePathId = {}
    for file in files:
        fileName = file.getName()
        if fileName.endswith(".db"):
            filePathId[file.getParentPath()] = file.getId()

    if numFiles > 0:
        # Register custom attribute types; creation raises when the type
        # already exists (expected on re-runs).
        for artifact in self.stringColumns:
            try:
                attID = skCase.addArtifactAttributeType(artifact[0], BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, artifact[1])
            except:
                self.log(Level.INFO, "Attributes Creation Error, " + artifact[0] + " ==> ")
        for artifact in self.dateColumns:
            try:
                attID = skCase.addArtifactAttributeType(artifact[0], BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, artifact[1])
            except:
                self.log(Level.INFO, "Attributes Creation Error, " + artifact[0] + " ==> ")

        # Register the custom artifact type (same re-run caveat).
        try:
            artID_art = skCase.addArtifactType("TSK_TERACOPY_DB", "Teracopy History DB")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, artifact TSK_TERACOPY_DB exists. ==> ")
        artTeraCopyId = skCase.getArtifactTypeID("TSK_TERACOPY_DB")
        artTeraCopy = skCase.getArtifactType("TSK_TERACOPY_DB")
        moduleName = ProcessTeraCopyDbIngestModuleFactory.moduleName

        # Extract and process each main.db.
        for file in files:
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            fileCount += 1

            # Save the file locally, id-prefixed to reduce collisions.
            fileId = filePathId[file.getParentPath()]
            extractedFile = os.path.join(temporaryDirectory, str(fileId) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            userpath = file.getParentPath()
            # Path component [2] is presumably the user name — TODO confirm.
            username = userpath.split('/')

            # Open the extracted copy over JDBC/SQLite.
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extractedFile)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedFile + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # One row per transfer; column aliases double as TSK attribute
            # names so they can be looked up directly later.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("select name TSK_HISTORY_FILE, SOURCE TSK_SOURCE_LOCATION, target TSK_TARGET_LOCATION, " + \
                    " CASE operation WHEN 1 THEN 'Copy' WHEN 2 THEN 'Move' WHEN 3 THEN 'Test' WHEN 6 THEN " + \
                    " 'Delete' END TSK_OPERATION_TYPE, strftime('%s', started) TSK_DATETIME_START, " + \
                    " strftime('%s', finished) TSK_DATETIME_END from list")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for list tables (" + e.getMessage() + ") ")
                return IngestModule.ProcessResult.OK

            while resultSet.next():
                # Each `list` row names a separate per-transfer history DB.
                historyFile = resultSet.getString("TSK_HISTORY_FILE")
                fileManagerHist = Case.getCurrentCase().getServices().getFileManager()
                histFiles = fileManagerHist.findFiles(dataSource, historyFile + "%", "%TeraCopy%")
                numHistFiles = len(histFiles)
                sourceLocation = resultSet.getString('TSK_SOURCE_LOCATION')
                targetLocation = resultSet.getString('TSK_TARGET_LOCATION')
                operationType = resultSet.getString('TSK_OPERATION_TYPE')
                startTime = resultSet.getInt('TSK_DATETIME_START')
                endTime = resultSet.getInt('TSK_DATETIME_END')

                for histFile in histFiles:
                    # Extract the history DB (id-prefixed) and open it.
                    extractedHistFile = os.path.join(temporaryDirectory, str(histFile.getId()) + "-" + historyFile)
                    ContentUtils.writeToFile(histFile, File(extractedHistFile))
                    try:
                        # Driver already registered above for this method.
                        dbConnHist = DriverManager.getConnection("jdbc:sqlite:%s" % extractedHistFile)
                    except SQLException as e:
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedHistFile + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK

                    # One row per file in the transfer; aliases again map
                    # straight onto TSK attribute names.
                    try:
                        stmtHist = dbConnHist.createStatement()
                        resultSetHist = stmtHist.executeQuery("SELECT SOURCE TSK_FILE_PATH, CASE State WHEN 0 THEN 'Added' " + \
                            " WHEN 1 THEN 'OK' WHEN 2 THEN 'Verified' " + \
                            " WHEN 3 THEN 'Error' WHEN 4 THEN 'Skipped' WHEN 5 THEN 'Deleted' " + \
                            " WHEN 6 THEN 'Moved' END TSK_OPERATION_STATE, SIZE TSK_FILE_SIZE, " + \
                            " Attributes TSK_ATTRIBUTES, CASE IsFolder WHEN 0 THEN '' WHEN 1 THEN 'Yes' " + \
                            " END TSK_ISFOLDER, strftime('%s', Creation) TSK_DATETIME_CREATED, " + \
                            " strftime('%s', Access) TSK_DATETIME_ACCESSED, " + \
                            " strftime('%s', Write) TSK_DATETIME_MODIFIED, " + \
                            " SourceCRC TSK_SOURCE_CRC, TargetCRC TSK_TARGET_CRC, Message TSK_MESSAGE " + \
                            " FROM Files ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error querying database for list tables (" + e.getMessage() + ") ")
                        return IngestModule.ProcessResult.OK

                    # Discover column labels once per history DB.
                    meta = resultSetHist.getMetaData()
                    columnCount = meta.getColumnCount()
                    columnNames = []
                    for x in range (1, columnCount + 1):
                        columnNames.append(meta.getColumnLabel(x))

                    # One artifact (on main.db) per file row: transfer-level
                    # attributes plus every per-file column.
                    while resultSetHist.next():
                        try:
                            artifact = file.newArtifact(artTeraCopyId)
                            attributes = ArrayList()
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_HISTORY_FILE'), moduleName, historyFile))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_SOURCE_LOCATION'), moduleName, sourceLocation))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_TARGET_LOCATION'), moduleName, targetLocation))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_OPERATION_TYPE'), moduleName, operationType))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_DATETIME_START'), moduleName, startTime))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_DATETIME_END'), moduleName, endTime))
                            for x in range(0, columnCount):
                                # Date-typed columns go in as ints, the rest
                                # as strings.
                                if columnNames[x] in self.dateColumn:
                                    attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSetHist.getInt(columnNames[x])))
                                else:
                                    attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSetHist.getString(columnNames[x])))
                            artifact.addAttributes(attributes)
                            # Index the artifact for keyword search
                            # (best effort).
                            try:
                                blackboard.indexArtifact(artifact)
                            except:
                                pass
                        except SQLException as e:
                            self.log(Level.INFO, "Error getting values from files table (" + e.getMessage() + ")")

                    # Close the per-history-DB statement/connection.
                    try:
                        stmtHist.close()
                        dbConnHist.close()
                    except:
                        pass

            # Close the main.db statement/connection.
            try:
                stmt.close()
                dbConn.close()
            except:
                pass

    # After all databases, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "TeraCopy DB", " TeraCopy DB Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Create (or mount) a preview VHD with diskpart, export files matching
    the extensions listed in the module's File_Extensions_To_Export SQLite
    table onto it, write per-extension count logs, then unmount the disk.

    Returns IngestModule.ProcessResult.OK on success and on the early-exit
    error paths (unopenable config DB, failed query).
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()
    progressBar.progress("Creating/Mounting the Virtual Disk")

    skCase = Case.getCurrentCase().getSleuthkitCase();

    # Directory (under module output) that will hold the VHD file.
    mod_dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    vdisk_dir = os.path.join(mod_dir, "Preview_VHD")
    try:
        os.mkdir(vdisk_dir)
    except:
        self.log(Level.INFO, "Virtual disk directory already exists in Module Directory")
    vdisk_name = os.path.join(vdisk_dir, Case.getCurrentCase().getNumber() + "_preview.vhd")

    # Size the VHD from the image size, in megabytes.
    size_of_disk = dataSource.getSize() // 1048576
    self.log(Level.INFO, "size of disk is ==> " + str(size_of_disk))
    (vdisk_create_script, vdisk_unmount_script, vdisk_mount_script, drive_letter) = self.Create_Diskpart_Script(size_of_disk, vdisk_name)

    # If the VHD already exists just mount it; otherwise create, mount and
    # format it via the generated diskpart script.
    if os.path.exists(vdisk_name):
        self.log(Level.INFO, "Running prog ==> " + "diskpart.exe " + " -S " + vdisk_mount_script)
        pipe = Popen(["diskpart.exe", "-S", vdisk_mount_script], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)
    else:
        self.log(Level.INFO, "Running prog ==> " + "diskpart.exe " + " -S " + vdisk_create_script)
        pipe = Popen(["diskpart.exe", "-S", vdisk_create_script], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Top-level directory on the mounted disk, named after the data source.
    try:
        data_source_dir = os.path.join(drive_letter + "\\", dataSource.getName())
        os.mkdir(data_source_dir)
    except:
        self.log(Level.INFO, "Data source Directory already exists")

    # CSV logs of how many files of each extension were written: one copy
    # beside the VHD, one on the VHD itself.
    try:
        mod_log_file = os.path.join(vdisk_dir, "File_Extensions_Written_Log_" + dataSource.getName() + ".csv")
        self.log(Level.INFO, "Output Directory is ==> " + mod_log_file)
        mod_log = open(mod_log_file, "w")
        mod_log.write('Directory_In,File_Extension,Number_Of_Files_Written \n')
        out_log_file = os.path.join(drive_letter + "\\", "File_Extensions_Written_Log_" + dataSource.getName() + ".csv")
        self.log(Level.INFO, "Output Directory is ==> " + out_log_file)
        out_log = open(out_log_file, "w")
        out_log.write('Directory_In,File_Extension,Number_Of_Files_Written \n')
    except:
        self.log(Level.INFO, "Log File creation error")

    # Open the module's extension-configuration DB over JDBC/SQLite.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % self.file_extension_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open File Extension database " + self.file_extension_db + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Get all the file extensions to find and export to the preview disk.
    try:
        stmt = dbConn.createStatement()
        SQL_Statement = "select Output_Directory, File_Extension from File_Extensions_To_Export"
        self.log(Level.INFO, "SQL Statement --> " + SQL_Statement)
        resultSet = stmt.executeQuery(SQL_Statement)
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for File_Extensions_To_Export table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # One configured extension per row: find matching files and copy them
    # into the row's output directory on the VHD.
    while resultSet.next():
        try:
            progressBar.progress("Extracting " + resultSet.getString('Output_Directory') + " Files")
            fileManager = Case.getCurrentCase().getServices().getFileManager()
            files = fileManager.findFiles(dataSource, "%." + resultSet.getString("File_Extension"), "")
            numFiles = len(files)
            self.log(Level.INFO, "Number of files found for file extension " + resultSet.getString("File_Extension") + " ==> " + str(numFiles))
            try:
                mod_log.write(resultSet.getString('Output_Directory') + "," + resultSet.getString("File_Extension") + "," + str(numFiles) + "\n")
                out_log.write(resultSet.getString('Output_Directory') + "," + resultSet.getString("File_Extension") + "," + str(numFiles) + "\n")
            except:
                self.log(Level.INFO, " Error Writing Log File ==> " + resultSet.getString('Output_Directory') + "," + resultSet.getString("File_Extension") + "," + str(numFiles) + "\n")

            # Per-extension directory; may already exist from a prior row.
            try:
                dir_to_write_to = os.path.join(data_source_dir, resultSet.getString('Output_Directory'))
                if not os.path.exists(dir_to_write_to):
                    os.mkdir(dir_to_write_to)
            except:
                self.log(Level.INFO, "Directory " + resultSet.getString('Output_Directory') + " already exists.")

            # Write all matching files to the VHD, id-prefixed to avoid
            # name collisions.
            for file in files:
                lclfile = os.path.join(dir_to_write_to, str(file.getId()) + "-" + file.getName())
                ContentUtils.writeToFile(file, File(lclfile))
        except:
            self.log(Level.INFO, "Error in processing sql statement")

    # Close the log files (best effort — they may not have been created).
    try:
        mod_log.close()
        out_log.close()
    except:
        self.log(Level.INFO, "Error closing log files, they might not exist")

    # Unmount the VHD via diskpart.
    progressBar.progress("Unmounting The Virtual Disk")
    self.log(Level.INFO, "Running prog ==> " + "diskpart.exe " + " -S " + vdisk_unmount_script)
    pipe = Popen(["diskpart.exe", "-S", vdisk_unmount_script], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Clean up the JDBC resources.
    stmt.close()
    dbConn.close()

    # Clean up the generated diskpart scripts.
    # BUGFIX: the failure log previously referenced Temp_Dir, a name never
    # defined in this method, raising NameError inside the except handler;
    # bind the path once and log that instead.
    vdisk_script_dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "vdisk_scripts")
    try:
        shutil.rmtree(vdisk_script_dir)
    except:
        self.log(Level.INFO, "removal of vdisk script directory failed " + vdisk_script_dir)

    # Post a completion message to the ingest inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "VDiskCreate", " VDiskCreate Files Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse Amcache.hve via an external executable into a SQLite DB, post
    one artifact type per Amcache table, then (second pass) submit the
    recorded SHA1 hashes to VirusTotal and post the scan results.

    Relies on instance state set up elsewhere: List_Of_tables, my_exe,
    API_Key, Private, count, and the *_exists / *_count fields mutated here.
    """
    # Nothing selected to parse -> report and bail out with an error.
    if len(self.List_Of_tables) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, self.moduleName, " Can't find my tables ")
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR

    # The helper executable is Windows-only.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "Amcache.hve")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    fileCount = 0;
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "Found temporary directory: " + Temp_Dir)

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Amcache Scan", " Parsing Amcache.Hve ")
    IngestServices.getInstance().postMessage(message)

    # Pass 1: dump each Amcache.hve to temp, convert it to SQLite with the
    # external executable, and turn each selected table into artifacts.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the hive locally, id-prefixed to reduce collisions.
        lclDbPath = os.path.join(Temp_Dir, str(file.getId()) + '-amcache.hve')
        ContentUtils.writeToFile(file, File(lclDbPath))
        mydb = Temp_Dir + "\\" + str(file.getId()) + "-myAmcache.db3"

        # Parse the hive into a SQLite DB via the helper executable.
        self.log(Level.INFO, "[Executable #1] Parsing Amcache.Hve: \"" + self.my_exe + "\" -r " + lclDbPath + " -d " + mydb)
        subprocess.Popen([self.my_exe, '-r', lclDbPath, '-d', mydb]).communicate()[0]

        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % mydb)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + mydb + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        for am_table_name in self.List_Of_tables:
            # Skip the VirusTotal result tables here — the VT executable
            # has not populated them yet (handled in pass 2 below).
            if am_table_name == 'root_file_virustotal_scan':
                continue
            if am_table_name == 'inventory_application_file_virustotal_scan':
                continue

            # Confirm the table actually exists in this converted DB.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('" + am_table_name + "'); ")
                self.log(Level.INFO, "query SQLite Master table for " + am_table_name)
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for table " + am_table_name + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            while resultSet.next():
                try:
                    self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                    table_name = resultSet.getString("tbl_name")
                    SQL_String_1 = "Select * from " + table_name + ";"
                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                    artifact_name = "TSK_" + table_name.upper()
                    artifact_desc = "Amcache " + table_name.upper()

                    # Register the per-table artifact type (raises if it
                    # already exists — expected on re-runs).
                    try:
                        self.log(Level.INFO, "Begin Create New Artifacts")
                        artID_amc = skCase.addArtifactType(artifact_name, artifact_desc)
                    except:
                        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                    artID_amc = skCase.getArtifactTypeID(artifact_name)
                    artID_amc_evt = skCase.getArtifactType(artifact_name)

                    # Mirror each table column as an attribute type, typed
                    # STRING for TEXT/untyped columns and LONG otherwise.
                    Column_Names = []
                    Column_Types = []
                    resultSet2 = stmt.executeQuery(SQL_String_2)
                    while resultSet2.next():
                        Column_Names.append(resultSet2.getString("name").upper())
                        Column_Types.append(resultSet2.getString("type").upper())
                        if resultSet2.getString("type").upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error (string), " + resultSet2.getString("name") + " ==> ")
                        elif resultSet2.getString("type").upper() == "":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error (string2), " + resultSet2.getString("name") + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error (long), " + resultSet2.getString("name") + " ==> ")

                    # One artifact per table row, attributes per column.
                    resultSet3 = stmt.executeQuery(SQL_String_1)
                    while resultSet3.next():
                        art = file.newArtifact(artID_amc)
                        Column_Number = 1
                        for col_name in Column_Names:
                            c_name = "TSK_" + col_name
                            attID_ex1 = skCase.getAttributeType(c_name)
                            if Column_Types[Column_Number - 1] == "TEXT":
                                art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            elif Column_Types[Column_Number - 1] == "":
                                art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            else:
                                art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                            Column_Number = Column_Number + 1
                    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(AmcacheScanIngestModuleFactory.moduleName, artID_amc_evt, None))
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
        stmt.close()
        dbConn.close()

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Amcache Scan", " Amcache Keys Have Been Parsed ")
    IngestServices.getInstance().postMessage(message)
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Amcache Scan", " Beginning VirusTotal Scan ")
    IngestServices.getInstance().postMessage(message)

    # Pass 2: for each converted DB, count the hash-bearing rows and submit
    # them one at a time to VirusTotal via the helper executable.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Re-open the per-hive SQLite DB created in pass 1.
        mydb = Temp_Dir + "\\" + str(file.getId()) + "-myAmcache.db3"
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % mydb)
        except SQLException as e:
            self.Error_Message.setText("Error Opening Settings")

        # Check the 'root_file' table exists, then count its rows.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = 'SELECT COUNT(*) as count FROM sqlite_master WHERE type = "table" AND name = "root_file";'
            resultSet = stmt.executeQuery(SQL_Statement)
            self.root_file_exists = int(resultSet.getString("count"))
        except:
            self.log(Level.INFO, "LOOK HERE: it's not working.")
        if self.root_file_exists:
            self.log(Level.INFO, "root_file table exists. Counting rows.")
            try:
                stmt = dbConn.createStatement()
                SQL_Statement = 'SELECT count(*) AS count FROM root_file;'
                resultSet = stmt.executeQuery(SQL_Statement)
                self.root_file_count = int(resultSet.getString("count"))
            except:
                self.log(Level.INFO, "LOOK HERE: it's not working.")

        # Same for 'inventory_application_file'.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = 'SELECT COUNT(*) as count FROM sqlite_master WHERE type = "table" AND name = "inventory_application_file";'
            resultSet = stmt.executeQuery(SQL_Statement)
            self.inventory_application_file_exists = int(resultSet.getString("count"))
        except:
            self.log(Level.INFO, "LOOK HERE: it's not working.")
        if self.inventory_application_file_exists:
            self.log(Level.INFO, "inventory_application_file table exists. Counting rows.")
            try:
                stmt = dbConn.createStatement()
                SQL_Statement = 'SELECT count(*) AS count FROM inventory_application_file;'
                resultSet = stmt.executeQuery(SQL_Statement)
                self.inventory_application_file_count = int(resultSet.getString("count"))
            except:
                self.log(Level.INFO, "LOOK HERE: it's not working.")
        stmt.close()
        dbConn.close()

        # Total rows to scan drives the determinate progress bar.
        self.sum = self.root_file_count + self.inventory_application_file_count
        progressBar.switchToDeterminate(self.sum)

        # Artifact type for root_file VT results.
        artifact_name = "TSK_" + 'root_file_virustotal_scan'.upper()
        artifact_desc = "Amcache " + 'root_file_virustotal_scan'.upper()
        try:
            self.log(Level.INFO, "Begin creating root_file_virustotal_scan Artifacts")
            artID_amc = skCase.addArtifactType(artifact_name, artifact_desc)
        except:
            self.log(Level.INFO, "ARTIFACTS CREATION ERROR: root_file_virustotal_scan")
        artID_typeID = skCase.getArtifactTypeID(artifact_name)
        artID_type = skCase.getArtifactType(artifact_name)

        # Fixed schema of the VT result tables (shared by both loops).
        Column_Names = ["p_key","file","sha1","vt_positives","vt_ratio","vt_report_link"]
        Column_Types = ["int","text","text","int","text","text"]

        # A public VirusTotal API key only allows 4 requests/minute
        # (1 per 15 seconds); current_time tracks the rate limit.
        current_time = time.time()

        # Scan root_file SHA1 hashes one p_key at a time.
        for i in range(0, self.root_file_count):
            subprocess.Popen([self.my_exe,'-d', mydb, '-a', self.API_Key, '-t', 'root_file', '-k', str(i + 1)]).communicate()[0]
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % mydb)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + mydb + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # First iteration only: confirm the results table now exists
            # and register its attribute types.
            if i == 0:
                try:
                    stmt = dbConn.createStatement()
                    resultSet = stmt.executeQuery('SELECT COUNT(*) as count FROM sqlite_master WHERE type = "table" AND name = "root_file_virustotal_scan";')
                    self.log(Level.INFO, "query SQLite Master table for root_file_virustotal_scan")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for table root_file_virustotal_scan (" + e.getMessage() + ")")
                if int(resultSet.getString("count")):
                    self.log(Level.INFO, "root_file_virustotal_scan found")
                    for j in range(0,len(Column_Names)):
                        if Column_Types[j].upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + Column_Names[j].upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, Column_Names[j])
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + Column_Names[j] + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + Column_Names[j].upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, Column_Names[j])
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + Column_Names[j] + " ==> ")
                stmt.close()

            # Pull this p_key's VT result row into an artifact.
            SQL_String_1 = 'SELECT "p_key","file","sha1","vt_positives","vt_ratio","vt_report_link" from "root_file_virustotal_scan" WHERE p_key = ' + str(i + 1) + ';'
            stmt = dbConn.createStatement()
            resultSet3 = stmt.executeQuery(SQL_String_1)
            while resultSet3.next():
                art = file.newArtifact(artID_typeID)
                Column_Number = 1
                for col_name in Column_Names:
                    c_name = "TSK_" + col_name.upper()
                    attID_ex1 = skCase.getAttributeType(c_name)
                    if Column_Types[Column_Number - 1].upper() == "TEXT":
                        art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, resultSet3.getString(col_name)))
                    elif Column_Types[Column_Number - 1] == "":
                        art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, resultSet3.getString(col_name)))
                    else:
                        art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, long(resultSet3.getInt(col_name))))
                    Column_Number = Column_Number + 1
            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(AmcacheScanIngestModuleFactory.moduleName, artID_type, None))
            stmt.close()
            dbConn.close()

            # Rate-limit public (non-private) API keys.
            # NOTE(review): diff = current - after is <= 0 here, making the
            # sleep >= 15s; likely intended after_time - current_time (the
            # elapsed time) — confirm before changing.
            if not self.Private:
                after_time = time.time()
                diff = current_time - after_time
                time.sleep(15 - diff)
                current_time = time.time()
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            self.count += 1
            progressBar.progress(self.count)

        # Artifact type for inventory_application_file VT results.
        artifact_name = "TSK_" + 'inventory_application_file_virustotal_scan'.upper()
        artifact_desc = "Amcache " + 'inventory_application_file_virustotal_scan'.upper()
        try:
            self.log(Level.INFO, "Begin creating inventory_application_file_virustotal_scan Artifacts")
            artID_amc = skCase.addArtifactType(artifact_name, artifact_desc)
        except:
            self.log(Level.INFO, "ARTIFACTS CREATION ERROR: inventory_application_file_virustotal_scan")
        artID_typeID = skCase.getArtifactTypeID(artifact_name)
        artID_type = skCase.getArtifactType(artifact_name)

        # Scan 'inventory_application_file' SHA1 hashes (same structure as
        # the root_file loop above).
        for i in range(0, self.inventory_application_file_count):
            subprocess.Popen([self.my_exe,'-d', mydb, '-a', self.API_Key, '-t', 'inventory_application_file', '-k', str(i + 1)]).communicate()[0]
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % mydb)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + mydb + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK
            if i == 0:
                try:
                    stmt = dbConn.createStatement()
                    resultSet = stmt.executeQuery('SELECT COUNT(*) as count FROM sqlite_master WHERE type = "table" AND name = "inventory_application_file_virustotal_scan";')
                    self.log(Level.INFO, "query SQLite Master table for inventory_application_file_virustotal_scan")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for table inventory_application_file_virustotal_scan (" + e.getMessage() + ")")
                if int(resultSet.getString("count")):
                    self.log(Level.INFO, "inventory_application_file_virustotal_scan found")
                    for j in range(0,len(Column_Names)):
                        if Column_Types[j].upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + Column_Names[j].upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, Column_Names[j])
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + Column_Names[j] + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + Column_Names[j].upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, Column_Names[j])
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + Column_Names[j] + " ==> ")
                stmt.close()
            SQL_String_1 = 'SELECT "p_key","file","sha1","vt_positives","vt_ratio","vt_report_link" from "inventory_application_file_virustotal_scan" WHERE p_key = ' + str(i + 1) + ';'
            stmt = dbConn.createStatement()
            resultSet3 = stmt.executeQuery(SQL_String_1)
            while resultSet3.next():
                art = file.newArtifact(artID_typeID)
                Column_Number = 1
                for col_name in Column_Names:
                    c_name = "TSK_" + col_name.upper()
                    attID_ex1 = skCase.getAttributeType(c_name)
                    if Column_Types[Column_Number - 1].upper() == "TEXT":
                        art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, resultSet3.getString(col_name)))
                    elif Column_Types[Column_Number - 1] == "":
                        art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, resultSet3.getString(col_name)))
                    else:
                        art.addAttribute(BlackboardAttribute(attID_ex1, AmcacheScanIngestModuleFactory.moduleName, long(resultSet3.getInt(col_name))))
                    Column_Number = Column_Number + 1
            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(AmcacheScanIngestModuleFactory.moduleName, artID_type, None))
            stmt.close()
            dbConn.close()
            # Same rate-limit logic (and suspected sign issue) as above.
            if not self.Private:
                after_time = time.time()
                diff = current_time - after_time
                time.sleep(15 - diff)
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK
                current_time = time.time()
            self.count += 1
            progressBar.progress(self.count)

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Amcache Scan", " VirusTotal Scan Complete ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Amcache parser module.

    Finds Amcache.hve hives, extracts them to a temp work area, runs an
    external EXE that converts the hive into a SQLite database, then posts
    one blackboard artifact per row of each user-selected table.

    Returns IngestModule.ProcessResult.OK on success (or cancel),
    IngestModule.ProcessResult.ERROR when no tables were selected.
    """
    # Nothing to do if the user selected no Amcache tables in the panel.
    if len(self.List_Of_tables) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseAmcache", " No Amcache tables Selected to Parse " )
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "Amcache.hve")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Amcache directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "amcache")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Amcache Directory already exists " + temp_dir)

    # Write out each Amcache hive to the temp directory.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # NOTE(review): all hives share one name, so multiple Amcache.hve
        # files would overwrite each other here — TODO confirm intended.
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the EXE, saving output to a sqlite database
    self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + "\Amcache\Amcache.hve Parm 2 ==> " + Temp_Dir + "\Amcache.db3")
    subprocess.Popen([self.path_to_exe, os.path.join(temp_dir, "Amcache.hve"), os.path.join(temp_dir, "Amcache.db3")]).communicate()[0]

    for file in files:
        # Open the DB using JDBC
        lclDbPath = os.path.join(temp_dir, "Amcache.db3")
        self.log(Level.INFO, "Path the Amcache database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the database for each user-selected table name.
        for am_table_name in self.List_Of_tables:
            try:
                stmt = dbConn.createStatement()
                # NOTE(review): table name is concatenated into the SQL; it
                # comes from the module's own settings list, not user data.
                resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('" + am_table_name + "'); ")
                self.log(Level.INFO, "query SQLite Master table for " + am_table_name)
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Cycle through each matching table and create artifacts
            while resultSet.next():
                try:
                    self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                    table_name = resultSet.getString("tbl_name")
                    SQL_String_1 = "Select * from " + table_name + ";"
                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                    artifact_name = "TSK_" + table_name.upper()
                    artifact_desc = "Amcache " + table_name.upper()
                    try:
                        self.log(Level.INFO, "Begin Create New Artifacts")
                        artID_amc = skCase.addArtifactType(artifact_name, artifact_desc)
                    except:
                        # Type already exists (e.g. from a previous run).
                        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                    artID_amc = skCase.getArtifactTypeID(artifact_name)
                    artID_amc_evt = skCase.getArtifactType(artifact_name)

                    # Discover columns and create a string/long attribute
                    # type for each (untyped columns treated as TEXT).
                    Column_Names = []
                    Column_Types = []
                    resultSet2 = stmt.executeQuery(SQL_String_2)
                    while resultSet2.next():
                        Column_Names.append(resultSet2.getString("name").upper())
                        Column_Types.append(resultSet2.getString("type").upper())
                        if resultSet2.getString("type").upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                        elif resultSet2.getString("type").upper() == "":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                    # Emit one artifact per data row of the table.
                    resultSet3 = stmt.executeQuery(SQL_String_1)
                    while resultSet3.next():
                        art = file.newArtifact(artID_amc)
                        Column_Number = 1
                        for col_name in Column_Names:
                            c_name = "TSK_" + col_name
                            attID_ex1 = skCase.getAttributeType(c_name)
                            if Column_Types[Column_Number - 1] == "TEXT":
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            elif Column_Types[Column_Number - 1] == "":
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            else:
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                            Column_Number = Column_Number + 1

                    # Notify the UI that new artifacts for this type exist.
                    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseAmcacheIngestModuleFactory.moduleName, artID_amc_evt, None))
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

        # Clean up JDBC resources for this database.
        stmt.close()
        dbConn.close()

    # Clean up the extracted hives and the work directory.
    # BUG FIX: the hives were written into temp_dir (the "amcache"
    # subdirectory), but cleanup previously removed Temp_Dir + filename and
    # rmdir'd Temp_Dir (the case temp directory) — so it always failed.
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of Amcache file failed " + os.path.join(temp_dir, file.getName()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Amcache directory failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Amcache Parser", " Amcache Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the EVTX-by-event-id parser.

    Extracts all *.evtx files, runs an external EXE that parses them into
    one EventLogs.db3 SQLite database, then posts either per-event
    artifacts (when specific event ids were selected) or per-event-id
    count artifacts (when 'ALL' was selected).

    Returns IngestModule.ProcessResult.OK on success (or cancel),
    IngestModule.ProcessResult.ERROR when no event logs were selected.
    """
    #Check to see if event logs were selected, if not then send message and error out else process events selected
    self.log(Level.INFO, "List Of Events ==> " + str(self.List_Of_Events) + " <== Number of Events ==> " + str(len(self.List_Of_Events)))
    if len(self.List_Of_Events) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " No Event Logs Selected to Parse ")
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR
    else:
        # Check to see if the artifacts exist and if not then create it, also check to see if the attributes
        # exist and if not then create them
        skCase = Case.getCurrentCase().getSleuthkitCase()
        # NOTE(review): this transaction is opened but never committed or
        # rolled back anywhere in this method — confirm whether it is needed.
        skCase_Tran = skCase.beginTransaction()
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_evtx = skCase.addArtifactType("TSK_EVTX_LOGS", "Windows Event Logs")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_evtx_Long = skCase.addArtifactType("TSK_EVTX_LOGS_BY_ID", "Windows Event Logs By Event Id")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID")

        # Create (or tolerate pre-existing) custom attribute types.
        try:
            attID_ev_fn = skCase.addArtifactAttributeType("TSK_EVTX_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Log File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ")
        try:
            attID_ev_rc = skCase.addArtifactAttributeType("TSK_EVTX_RECOVERED_RECORD", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Recovered Record")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Recovered Record. ==> ")
        try:
            attID_ev_cn = skCase.addArtifactAttributeType("TSK_EVTX_COMPUTER_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Computer Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ")
        try:
            attID_ev_ei = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Identiifier")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ")
        try:
            attID_ev_eiq = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Identifier Qualifiers")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Identifier Qualifiers. ==> ")
        try:
            attID_ev_el = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_LEVEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Level")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Level. ==> ")
        try:
            attID_ev_oif = skCase.addArtifactAttributeType("TSK_EVTX_OFFSET_IN_FILE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Offset In File")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Offset In File. ==> ")
        try:
            attID_ev_id = skCase.addArtifactAttributeType("TSK_EVTX_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Identifier")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ")
        try:
            attID_ev_sn = skCase.addArtifactAttributeType("TSK_EVTX_SOURCE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Source Name. ==> ")
        try:
            attID_ev_usi = skCase.addArtifactAttributeType("TSK_EVTX_USER_SECURITY_ID", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User Security ID")
        except:
            self.log(Level.INFO, "Attributes Creation Error, User Security ID. ==> ")
        try:
            attID_ev_et = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Time. ==> ")
        try:
            attID_ev_ete = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME_EPOCH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time Epoch")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ")
        try:
            attID_ev_dt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Detail")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Detail. ==> ")
        try:
            attID_ev_cnt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_ID_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Id Count")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event ID Count. ==> ")

        # Get the new artifacts and attributes that were just created
        artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
        artID_evtx_evt = skCase.getArtifactType("TSK_EVTX_LOGS")
        artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID")
        artID_evtx_Long_evt = skCase.getArtifactType("TSK_EVTX_LOGS_BY_ID")
        attID_ev_fn = skCase.getAttributeType("TSK_EVTX_FILE_NAME")
        attID_ev_rc = skCase.getAttributeType("TSK_EVTX_RECOVERED_RECORD")
        attID_ev_cn = skCase.getAttributeType("TSK_EVTX_COMPUTER_NAME")
        attID_ev_ei = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER")
        attID_ev_eiq = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS")
        attID_ev_el = skCase.getAttributeType("TSK_EVTX_EVENT_LEVEL")
        attID_ev_oif = skCase.getAttributeType("TSK_EVTX_OFFSET_IN_FILE")
        attID_ev_id = skCase.getAttributeType("TSK_EVTX_IDENTIFIER")
        attID_ev_sn = skCase.getAttributeType("TSK_EVTX_SOURCE_NAME")
        attID_ev_usi = skCase.getAttributeType("TSK_EVTX_USER_SECURITY_ID")
        attID_ev_et = skCase.getAttributeType("TSK_EVTX_EVENT_TIME")
        attID_ev_ete = skCase.getAttributeType("TSK_EVTX_EVENT_TIME_EPOCH")
        attID_ev_dt = skCase.getAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT")
        attID_ev_cnt = skCase.getAttributeType("TSK_EVTX_EVENT_ID_COUNT")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find the Windows Event Log Files
        files = []
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%.evtx")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0

        # Create Event Log directory in temp directory, if it exists then continue on processing
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        temp_dir = os.path.join(Temp_Dir, "EventLogs")
        try:
            os.mkdir(temp_dir)
        except:
            self.log(Level.INFO, "Event Log Directory already exists " + temp_dir)

        # Write out each Event Log file to the temp directory
        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            fileCount += 1
            lclDbPath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE, saving output to a sqlite database.
        # BUG FIX: the log previously used os.path.join(Temp_Dir, "\EventLogs.db3");
        # a leading backslash makes ntpath.join discard Temp_Dir, so the log
        # reported a wrong path (the Popen call below was already correct).
        self.log(Level.INFO, "Running program on data source " + self.path_to_exe + " parm 1 ==> " + temp_dir + " Parm 2 ==> " + os.path.join(Temp_Dir, "EventLogs.db3"))
        subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "EventLogs.db3")]).communicate()[0]

        # Set the database to be read to the one created by the Event_EVTX program
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "EventLogs.db3")
        self.log(Level.INFO, "Path to the Eventlogs database file created ==> " + lclDbPath)

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        for file in files:
            file_name = file.getName()
            self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====")
            # Query the Event_Logs table for this source file.
            if self.List_Of_Events[0] != 'ALL':
                # Specific event ids selected: one artifact per matching event.
                try:
                    stmt = dbConn.createStatement()
                    # NOTE(review): SQL is built by string concatenation from
                    # file_name and the configured Event_Id_List; inputs are
                    # module-internal, but parameterized queries would be safer.
                    SQL_Statement = "SELECT File_Name, Recovered_Record, Computer_name, Event_Identifier, " + \
                                    " Event_Identifier_Qualifiers, Event_Level, Event_offset, Identifier, " + \
                                    " Event_source_Name, Event_User_Security_Identifier, Event_Time, " + \
                                    " Event_Time_Epoch, Event_Detail_Text FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \
                                    " and Event_Identifier in ('" + self.Event_Id_List + "');"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====")
                    resultSet = stmt.executeQuery(SQL_Statement)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and create artifacts
                while resultSet.next():
                    try:
                        Computer_Name = resultSet.getString("Computer_Name")
                        Event_Identifier = resultSet.getInt("Event_Identifier")
                        Event_Level = resultSet.getString("Event_Level")
                        Event_Source_Name = resultSet.getString("Event_Source_Name")
                        Event_User_Security_Identifier = resultSet.getString("Event_User_Security_Identifier")
                        Event_Time = resultSet.getString("Event_Time")
                        Event_Detail_Text = resultSet.getString("Event_Detail_Text")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

                    # Make artifact for TSK_EVTX_LOGS
                    art = file.newArtifact(artID_evtx)
                    art.addAttributes(((BlackboardAttribute(attID_ev_cn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Computer_Name)),
                                       (BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)),
                                       (BlackboardAttribute(attID_ev_el, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Level)),
                                       (BlackboardAttribute(attID_ev_sn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Source_Name)),
                                       (BlackboardAttribute(attID_ev_usi, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_User_Security_Identifier)),
                                       (BlackboardAttribute(attID_ev_et, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time)),
                                       (BlackboardAttribute(attID_ev_dt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Detail_Text))))
            else:
                # 'ALL' selected: one artifact per (event id, file) with a count.
                try:
                    stmt_1 = dbConn.createStatement()
                    SQL_Statement_1 = "select event_identifier, file_name, count(*) 'Number_Of_Events' " + \
                                      " FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \
                                      " group by event_identifier, file_name order by 3;"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====")
                    resultSet_1 = stmt_1.executeQuery(SQL_Statement_1)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                self.log(Level.INFO, "This is the to see what the FU is")
                # Cycle through each row and create artifacts
                while resultSet_1.next():
                    try:
                        self.log(Level.INFO, "This is the to see what the FU is 2")
                        Event_Identifier = resultSet_1.getInt("Event_Identifier")
                        Event_ID_Count = resultSet_1.getInt("Number_Of_Events")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

                    self.log(Level.INFO, "This is the to see what the FU is 3")
                    # Make artifact for TSK_EVTX_LOGS_BY_ID
                    art_1 = file.newArtifact(artID_evtx_Long)
                    self.log(Level.INFO, "Type of Object is ==> " + str(type(Event_ID_Count)))
                    art_1.addAttributes(((BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)),
                                         (BlackboardAttribute(attID_ev_cnt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_ID_Count))))

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_Long_evt, None))

        # Clean up
        try:
            if self.List_Of_Events[0] != 'ALL':
                stmt.close()
            else:
                stmt_1.close()
            dbConn.close()
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "Error closing the statment, closing the database or removing the file")

        #Clean up EventLog directory and files
        for file in files:
            try:
                os.remove(os.path.join(temp_dir, file.getName()))
            except:
                self.log(Level.INFO, "removal of Event Log file failed " + Temp_Dir + "\\" + file.getName())
        try:
            os.rmdir(temp_dir)
        except:
            self.log(Level.INFO, "removal of Event Logs directory failed " + Temp_Dir)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " Event Logs have been parsed ")
        IngestServices.getInstance().postMessage(message)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))
        return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the plist parser module.

    For each configured plist file name, extracts matching files, runs an
    external converter EXE that writes the plist contents into a SQLite
    database, and posts one artifact per row of every non-empty table.
    Files the converter rejects are collected into the final ingest message.

    Returns IngestModule.ProcessResult.OK.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # Accumulates one line per plist the converter could not parse.
    message_desc = ''
    for Plist_Files in self.List_Of_DBs:
        files = fileManager.findFiles(dataSource, Plist_Files)
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;
        for file in files:
            # Extract the plist; "-<objectId>" suffix avoids name collisions.
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()))
            ContentUtils.writeToFile(file, File(lclDbPath))

            # Run the EXE, saving output to a sqlite database
            self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "\\" + \
                     file.getName() + "-" + str(file.getId()) + " " + Temp_Dir + "\\Plist_File-" + str(file.getId()) + ".db3 ")
            pipe = Popen([self.path_to_exe, os.path.join(Temp_Dir, (file.getName() + "-" + str(file.getId()))), \
                          os.path.join(Temp_Dir, ("Plist_File-" + str(file.getId()) + ".db3"))], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)
            # Converter signals an unparsable plist via its stdout text.
            if 'not a valid Plist' in out_text:
                message_desc = message_desc + "Error Parsing plist file " + file.getName() + ". File not parsed \n"
            else:
                extDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "Plist_File-" + str(file.getId()) + ".db3")
                # Open the generated database using JDBC.
                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extDbPath)
                    self.log(Level.INFO, "Database ==> " + file.getName())
                except SQLException as e:
                    # NOTE(review): on failure execution continues and the
                    # statements below will still use dbConn — confirm intended.
                    self.log(Level.INFO, "Could not open database file (not SQLite) " + extDbPath + " (" + e.getMessage() + ")")
                    #return IngestModule.ProcessResult.OK

                # Enumerate every table/view and turn non-empty ones into artifacts.
                try:
                    # Four statements so the nested result sets stay open
                    # independently of each other.
                    stmt = dbConn.createStatement()
                    stmt2 = dbConn.createStatement()
                    stmt3 = dbConn.createStatement()
                    stmt4 = dbConn.createStatement()
                    resultSet = stmt.executeQuery("Select tbl_name, type from SQLITE_MASTER where type in ('table','view');")
                    # Cycle through each row and create artifacts
                    while resultSet.next():
                        try:
                            self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                            table_name = resultSet.getString("tbl_name")
                            resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet.getString("tbl_name") + " ")
                            # while resultSet4.next():
                            row_count = resultSet4.getInt("NumRows")
                            self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ")
                            if row_count >= 1:
                                SQL_String_1 = "Select * from " + table_name + ";"
                                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                                # One artifact type per plist file name.
                                artifact_name = "TSK_" + file.getName()
                                artifact_desc = "Plist " + file.getName()
                                try:
                                    artID_plist = skCase.addArtifactType( artifact_name, artifact_desc)
                                except:
                                    # Type already exists (e.g. from a prior run).
                                    self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                                artID_plist = skCase.getArtifactTypeID(artifact_name)
                                artID_plist_evt = skCase.getArtifactType(artifact_name)

                                # Discover the table's columns and create one
                                # attribute type per column; text-like and blob
                                # columns become STRING, numeric become LONG.
                                Column_Names = []
                                Column_Types = []
                                resultSet2 = stmt2.executeQuery(SQL_String_2)
                                while resultSet2.next():
                                    Column_Names.append(resultSet2.getString("name").upper())
                                    Column_Types.append(resultSet2.getString("type").upper())
                                    attribute_name = "TSK_PLIST_" + resultSet2.getString("name").upper()
                                    if resultSet2.getString("type").upper() == "TEXT":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "LONGVARCHAR":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "BLOB":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    elif resultSet2.getString("type").upper() == "REAL":
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                    else:
                                        try:
                                            attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                        except:
                                            self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")

                                # Emit one artifact per data row.
                                resultSet3 = stmt3.executeQuery(SQL_String_1)
                                while resultSet3.next():
                                    art = file.newArtifact(artID_plist)
                                    Column_Number = 1
                                    for col_name in Column_Names:
                                        c_name = "TSK_PLIST_" + Column_Names[Column_Number - 1]
                                        attID_ex1 = skCase.getAttributeType(c_name)
                                        if Column_Types[Column_Number - 1] == "TEXT":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                        elif Column_Types[Column_Number - 1] == "":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                        elif Column_Types[Column_Number - 1] == "LONGVARCHAR":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                        elif Column_Types[Column_Number - 1] == "BLOB":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                        elif Column_Types[Column_Number - 1] == "REAL":
                                            art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number))))
                                        else:
                                            art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                                        Column_Number = Column_Number + 1

                                # Notify the UI that new artifacts of this type exist.
                                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParsePlists2DBDelRecIngestModuleFactory.moduleName, \
                                    artID_plist_evt, None))
                        except SQLException as e:
                            self.log(Level.INFO, "Error getting values from table " + resultSet.getString("tbl_name") + " (" + e.getMessage() + ")")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")")
                    #return IngestModule.ProcessResult.OK

                # Clean up
                # NOTE(review): only stmt is closed here; stmt2/stmt3/stmt4
                # are left open until dbConn.close() — confirm intended.
                stmt.close()
                dbConn.close()
                os.remove(os.path.join(Temp_Dir, "Plist_File-" + str(file.getId()) + ".db3"))
                os.remove(os.path.join(Temp_Dir, file.getName() + "-" + str(file.getId())))

    # After all databases, post a message to the ingest messages in box.
    if len(message_desc) == 0:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Plist Parser", " Plist files have been parsed " )
        IngestServices.getInstance().postMessage(message)
    else:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Plist Parser", message_desc + " Plist files have been parsed with the above files failing " )
        IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: run configured SQL against Alexa SQLite dbs.

    A settings database (alexa_db.db3, shipped beside this module) maps
    file names to the SQL to run against each matching file extracted
    from the data source.  For every configured artifact_name an artifact
    type is created, one TSK_ALEXA_<COLUMN> attribute type per result
    column, and one artifact per result row.

    dataSource  -- the data source Content being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK in all cases; errors are logged.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    self.log(Level.INFO, "Starting 2 to process, Just before call to parse_safari_history")

    skCase = Case.getCurrentCase().getSleuthkitCase()
    head, tail = os.path.split(os.path.abspath(__file__))
    # FIX: build the path with os.path.join instead of a hard-coded "\\"
    # separator so the module also works on non-Windows hosts (matches the
    # sibling copy of this routine, which already joins portably).
    settings_db = os.path.join(head, "alexa_db.db3")

    # Open the bundled settings database that drives the parsing.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        # FIX: the old message named the wrong file (macos_recents.db3).
        self.log(Level.INFO, "Could not open database file (not SQLite) alexa_db.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the settings database for the unique file names to search for.
    try:
        stmt = dbConn.createStatement()
        process_data_sql = "Select distinct file_name from alexa_databases"
        self.log(Level.INFO, process_data_sql)
        resultSet = stmt.executeQuery(process_data_sql)
        self.log(Level.INFO, "Query Database table for unique file names")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for unique file names")
        return IngestModule.ProcessResult.OK

    # For each configured file name, extract every matching file and run
    # the configured SQL against it.
    while resultSet.next():
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, resultSet.getString("file_name"))
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files for file_name ==> " + resultSet.getString("file_name"))
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0
        for file in files:
            # Copy the file out of the image; the object id in the name
            # reduces collisions between identically named files.
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()))
            ContentUtils.writeToFile(file, File(lclDbPath))

            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn_x = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
                self.log(Level.INFO, "Database ==> " + file.getName())
            except SQLException as e:
                # NOTE(review): on failure dbConn_x is stale/undefined but
                # processing continues; kept as-is to preserve behavior.
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + "-" + str(file.getId()) + " (" + e.getMessage() + ")")

            # Pull the artifact definitions and SQL for this file name.
            try:
                stmt_sql = dbConn.createStatement()
                process_stmt_sql = "select artifact_name, artifact_description, sql_to_run from alexa_databases where file_name = '" + resultSet.getString("file_name") + "';"
                self.log(Level.INFO, process_stmt_sql)
                resultSet_sql = stmt_sql.executeQuery(process_stmt_sql)
                self.log(Level.INFO, "Query Database table for sql statements")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for sql_statements for file " + resultSet.getString("file_name"))

            # Run each configured SQL statement against the extracted db.
            while resultSet_sql.next():
                try:
                    stmt_1 = dbConn_x.createStatement()
                    sql_to_run = resultSet_sql.getString("sql_to_run")
                    self.log(Level.INFO, sql_to_run)
                    resultSet_3 = stmt_1.executeQuery(sql_to_run)
                    self.log(Level.INFO, "query " + sql_to_run)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for " + resultSet.getString("file_name"))
                    continue

                # Create (or reuse) the artifact type named in the settings db;
                # addArtifactType raises if the type already exists.
                try:
                    artID_sql = skCase.addArtifactType(resultSet_sql.getString("artifact_name"), resultSet_sql.getString("artifact_description"))
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, for artifact. ==> " + resultSet_sql.getString("artifact_name"))
                artID_hst = skCase.getArtifactTypeID(resultSet_sql.getString("artifact_name"))
                artID_hst_evt = skCase.getArtifactType(resultSet_sql.getString("artifact_name"))

                # Create one TSK_ALEXA_* attribute type per result column.
                meta = resultSet_3.getMetaData()
                columncount = meta.getColumnCount()
                column_names = []
                self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
                for x in range(1, columncount + 1):
                    self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                    try:
                        attID_ex1 = skCase.addArtifactAttributeType("TSK_ALEXA_" + meta.getColumnLabel(x).upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, meta.getColumnLabel(x))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error, " + "TSK_ALEXA_" + meta.getColumnLabel(x) + " ==> ")
                    column_names.append(meta.getColumnLabel(x))
                self.log(Level.INFO, "All Columns ==> " + str(column_names))

                # Cycle through each row and create artifacts.
                while resultSet_3.next():
                    try:
                        self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                        art = file.newArtifact(artID_hst)
                        self.log(Level.INFO, "Inserting attribute URL")
                        for col_name in column_names:
                            attID_ex1 = skCase.getAttributeType("TSK_ALEXA_" + col_name.upper())
                            self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                            self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                            if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                                # FIX: read LONG columns with getLong (was getInt,
                                # which truncates values wider than 32 bits).
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getLong(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                                # FIX: read DOUBLE columns with getDouble (was
                                # getInt, which dropped the fractional part).
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getDouble(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                            else:
                                # DATETIME (and any other type) falls through here.
                                # FIX: java.sql.ResultSet has no getReal(); the old
                                # call raised AttributeError on every row and the
                                # bare except silently dropped the value.  DATETIME
                                # attributes take a long epoch value, so use getLong.
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getLong(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from sql statement ==> " + resultSet_sql.getString("artifact_name"))

                # Tell the UI new data is available for this artifact type.
                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(Alexa_DB_ParseIngestModuleFactory.moduleName, artID_hst_evt, None))
                stmt_1.close()
            stmt_sql.close()
            dbConn_x.close()

    # After all databases, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Mac OS Recent Artifacts", " Mac OS Recents Artifacts Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: run configured SQL against Alexa SQLite dbs.

    Duplicate variant of the Alexa parser: the bundled Alexa_DB.db3 maps
    file names to SQL statements; each matching file is extracted from the
    data source, the SQL is run against it, and one artifact per result
    row is posted with TSK_ALEXA_<COLUMN> attributes.

    dataSource  -- the data source Content being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK in all cases; errors are logged.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    self.log(Level.INFO, "Starting 2 to process, Just before call to parse_safari_history")

    skCase = Case.getCurrentCase().getSleuthkitCase()
    head, tail = os.path.split(os.path.abspath(__file__))
    settings_db = os.path.join(head, "Alexa_DB.db3")

    # Open the bundled settings database that drives the parsing.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) Alexa_DB.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the settings database for the unique file names to search for.
    try:
        stmt = dbConn.createStatement()
        process_data_sql = "Select distinct file_name from alexa_databases"
        self.log(Level.INFO, process_data_sql)
        resultSet = stmt.executeQuery(process_data_sql)
        self.log(Level.INFO, "Query Database table for unique file names")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for unique file names")
        return IngestModule.ProcessResult.OK

    # For each configured file name, extract every matching file and run
    # the configured SQL against it.
    while resultSet.next():
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, resultSet.getString("file_name"))
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files for file_name ==> " + resultSet.getString("file_name"))
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0
        for file in files:
            # Copy the file out of the image; the object id in the name
            # reduces collisions between identically named files.
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()))
            ContentUtils.writeToFile(file, File(lclDbPath))

            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn_x = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
                self.log(Level.INFO, "Database ==> " + file.getName())
            except SQLException as e:
                # NOTE(review): on failure dbConn_x is stale/undefined but
                # processing continues; kept as-is to preserve behavior.
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + "-" + str(file.getId()) + " (" + e.getMessage() + ")")

            # Pull the artifact definitions and SQL for this file name.
            try:
                stmt_sql = dbConn.createStatement()
                process_stmt_sql = "select artifact_name, artifact_description, sql_to_run from alexa_databases where file_name = '" + resultSet.getString("file_name") + "';"
                self.log(Level.INFO, process_stmt_sql)
                resultSet_sql = stmt_sql.executeQuery(process_stmt_sql)
                self.log(Level.INFO, "Query Database table for sql statements")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for sql_statements for file " + resultSet.getString("file_name"))

            # Run each configured SQL statement against the extracted db.
            while resultSet_sql.next():
                try:
                    stmt_1 = dbConn_x.createStatement()
                    sql_to_run = resultSet_sql.getString("sql_to_run")
                    self.log(Level.INFO, sql_to_run)
                    resultSet_3 = stmt_1.executeQuery(sql_to_run)
                    self.log(Level.INFO, "query " + sql_to_run)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for " + resultSet.getString("file_name"))
                    continue

                # Create (or reuse) the artifact type named in the settings db;
                # addArtifactType raises if the type already exists.
                try:
                    artID_sql = skCase.addArtifactType(resultSet_sql.getString("artifact_name"), resultSet_sql.getString("artifact_description"))
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, for artifact. ==> " + resultSet_sql.getString("artifact_name"))
                artID_hst = skCase.getArtifactTypeID(resultSet_sql.getString("artifact_name"))
                artID_hst_evt = skCase.getArtifactType(resultSet_sql.getString("artifact_name"))

                # Create one TSK_ALEXA_* attribute type per result column.
                meta = resultSet_3.getMetaData()
                columncount = meta.getColumnCount()
                column_names = []
                self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
                for x in range(1, columncount + 1):
                    self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                    try:
                        attID_ex1 = skCase.addArtifactAttributeType("TSK_ALEXA_" + meta.getColumnLabel(x).upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, meta.getColumnLabel(x))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error, " + "TSK_ALEXA_" + meta.getColumnLabel(x) + " ==> ")
                    column_names.append(meta.getColumnLabel(x))
                self.log(Level.INFO, "All Columns ==> " + str(column_names))

                # Cycle through each row and create artifacts.
                while resultSet_3.next():
                    try:
                        self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                        art = file.newArtifact(artID_hst)
                        self.log(Level.INFO, "Inserting attribute URL")
                        for col_name in column_names:
                            attID_ex1 = skCase.getAttributeType("TSK_ALEXA_" + col_name.upper())
                            self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                            self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                            if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                                # FIX: read LONG columns with getLong (was getInt,
                                # which truncates values wider than 32 bits).
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getLong(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                                # FIX: read DOUBLE columns with getDouble (was
                                # getInt, which dropped the fractional part).
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getDouble(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                            else:
                                # DATETIME (and any other type) falls through here.
                                # FIX: java.sql.ResultSet has no getReal(); the old
                                # call raised AttributeError on every row and the
                                # bare except silently dropped the value.  DATETIME
                                # attributes take a long epoch value, so use getLong.
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getLong(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from sql statement ==> " + resultSet_sql.getString("artifact_name"))

                # Tell the UI new data is available for this artifact type.
                IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(Alexa_DB_ParseIngestModuleFactory.moduleName, artID_hst_evt, None))
                stmt_1.close()
            stmt_sql.close()
            dbConn_x.close()

    # After all databases, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Mac OS Recent Artifacts", " Mac OS Recents Artifacts Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: recover Recycle Bin metadata ($I/$R pairs).

    Extracts the SAM hive to map RIDs to user names, then pairs each $I
    metadata file with its $R payload file and posts a TSK_RECYCLE_BIN
    artifact carrying user name, original path and deletion time.

    dataSource  -- the data source Content being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK (also on user cancel).
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Create a recyclebin working directory in temp; reuse it if present.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "recyclebin")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "recyclebin Directory already exists " + temp_dir)

    # Extract the SAM hive and build the RID -> user name map.
    files = fileManager.findFiles(dataSource, "SAM", "Windows/System32/Config")
    numFiles = len(files)
    self.log(Level.INFO, "Number of SAM Files found ==> " + str(numFiles))
    userRids = {}
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        if file.getParentPath() == '/Windows/System32/Config/':
            # Save the hive locally and parse it for user names and RIDs.
            lclDbPath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))
            userRids = self.processSAMFile(lclDbPath)
        else:
            self.log(Level.INFO, "Skipping File " + file.getName() + " In Path " + file.getParentPath())

    # Set up the artifact and attribute types (reuse if already defined).
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_ls = skCase.addArtifactType("TSK_RECYCLE_BIN", "Recycle Bin")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

    try:
        attIdFilePath = skCase.addArtifactAttributeType("TSK_FILE_NAME_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Path File Name")
    except:
        attIdFilePath = skCase.getAttributeType("TSK_FILE_NAME_PATH")
        self.log(Level.INFO, "Attributes Creation Error, TSK_FILE_NAME_PATH ==> ")

    try:
        attIdDelTime = skCase.addArtifactAttributeType("TSK_FILE_DEL_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Deletion Time")
    except:
        attIdDelTime = skCase.getAttributeType("TSK_FILE_DEL_TIME")
        self.log(Level.INFO, "Attributes Creation Error, TSK_FILE_DEL_TIME ==> ")

    artifactName = "TSK_RECYCLE_BIN"
    artId = skCase.getArtifactTypeID(artifactName)
    attIdUserName = skCase.getAttributeType("TSK_USER_NAME")

    # Pair every $I metadata file with its $R payload file.
    iFiles = fileManager.findFiles(dataSource, "$I%")
    # FIX: was len(files) (the SAM hive list), which logged the wrong count.
    numFiles = len(iFiles)
    self.log(Level.INFO, "Number of $I Files found ==> " + str(numFiles))
    for iFile in iFiles:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        # Save the $I locally in the temp folder.
        # NOTE(review): unlike the comment in the original suggests, the
        # name does NOT include the file id, so same-named $I files from
        # different users can collide here -- confirm intended.
        lclDbPath = os.path.join(temp_dir, iFile.getName())
        ContentUtils.writeToFile(iFile, File(lclDbPath))
        self.log(Level.INFO, "Getting File " + iFile.getName() + " In Path " + iFile.getParentPath())

        rFileName = iFile.getName().replace("$I", "$R")
        rFiles = fileManager.findFiles(dataSource, rFileName, iFile.getParentPath())
        # FIX: was len(files), which logged the wrong count.
        numRFiles = len(rFiles)
        self.log(Level.INFO, "Number of $R Files found ==> " + str(numRFiles))
        for rFile in rFiles:
            if (rFile.getParentPath() == iFile.getParentPath()):
                # Parse the extracted $I file for original path and
                # deletion timestamp; skip on parse failure.
                fileNamePath, deletedTimeStamp = self.getFileMetadata(os.path.join(temp_dir, iFile.getName()))
                if fileNamePath != None:
                    art = rFile.newArtifact(artId)
                    self.log(Level.INFO, "Parent Path ==> " + iFile.getParentPath())
                    # The recycle-bin folder name ends in "-<RID>"; use the
                    # RID to look up the owning user name from the SAM map.
                    startSearch = iFile.getParentPath().rfind("-")
                    userRid = iFile.getParentPath()[startSearch + 1:].replace('/', '')
                    art.addAttribute(BlackboardAttribute(attIdUserName, RecBin2IngestModuleFactory.moduleName, userRids.get(userRid, "Not Found!")))
                    art.addAttribute(BlackboardAttribute(attIdFilePath, RecBin2IngestModuleFactory.moduleName, fileNamePath))
                    art.addAttribute(BlackboardAttribute(attIdDelTime, RecBin2IngestModuleFactory.moduleName, deletedTimeStamp))

    # Clean up the recyclebin directory and extracted files.
    try:
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + temp_dir)

    # After all files, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "RecycleBin", " Recycle Bin Files Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: extract installed AppX programs from usrclass.dat.

    Writes each user's usrclass.dat hive to a temp directory, runs the
    bundled appxreg.exe helper (self.path_to_exe) to convert it to a
    SQLite db, then posts one TSK_INSTALLED_PROG artifact per installed
    package with its name and install time.

    dataSource  -- the data source Content being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK (also on user cancel / db errors).
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "usrclass.dat")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create the working directory in temp; reuse it if already present.
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Appxreg_Programs")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # First pass: write out each user's usrclass.dat hive.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Save the file locally; use the file id in the name to reduce
        # collisions between identically named hives.
        extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))

    # Second pass: convert each hive with appxreg.exe and read the result.
    for file in files:
        extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
        # NOTE(review): the same output db is reused for every hive --
        # confirm appxreg.exe recreates (rather than appends to) it.
        dbFile = os.path.join(temporaryDirectory, "appxreg.db3")
        # FIX: the old log concatenated the two paths with no separator.
        self.log(Level.INFO, "Running prog ==> appxreg.exe " + extractedFile + " " + dbFile)
        pipe = Popen([self.path_to_exe, extractedFile, dbFile], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        artIdInsProg = skCase.getArtifactTypeID("TSK_INSTALLED_PROG")
        artIdInsProgType = skCase.getArtifactType("TSK_INSTALLED_PROG")
        moduleName = ProcessAppxregProgramsIngestModuleFactory.moduleName

        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % dbFile)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedFile + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # install_dttm_ms_epoch is a Windows epoch; the first 11 digits
        # minus 11644473600 (seconds between 1601 and 1970) yields a Unix
        # timestamp in seconds.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("select package_name, substr(install_dttm_ms_epoch,1,11) -11644473600 Installed_DTTM from installed_apps;")
            self.log(Level.INFO, "query Installed_Application tables")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for appx tables (" + e.getMessage() + ") ")
            return IngestModule.ProcessResult.OK

        # Cycle through each row: one artifact per installed program.
        while resultSet.next():
            try:
                artInsProg = file.newArtifact(artIdInsProg)
                attributes = ArrayList()
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, moduleName, resultSet.getString("package_name")))
                # FIX: use the ATTRIBUTE_TYPE object (consistent with the
                # line above, instead of the deprecated int-id constructor)
                # and read the timestamp with getLong -- DATETIME attributes
                # take a long epoch value.
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, moduleName, resultSet.getLong("Installed_DTTM")))
                artInsProg.addAttributes(attributes)
                # Index the artifact for keyword search (best effort).
                try:
                    blackboard.indexArtifact(artInsProg)
                except:
                    pass
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from Appx tables (" + e.getMessage() + ")")

        # Close the database statement and connection for this hive.
        try:
            stmt.close()
            dbConn.close()
        except:
            pass

    # After all databases, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Appxreg Installed Programs", " Appxreg Installed Programs Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: parse macOS Spotlight store.db files.

    Extracts each Store-V2/store.db file, converts it with
    processSpotlightFile() into spotlight_db.db3 under the module
    directory, then reads that db back via JDBC with processSpotlightDb().

    dataSource  -- the data source Content being ingested
    progressBar -- DataSourceIngestModuleProgress for UI feedback
    Returns IngestModule.ProcessResult.OK (also on user cancel / db errors).
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "store.db", "Store-V2")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create module and temp working directories; reuse if present.
    moduleDirectory = os.path.join(Case.getCurrentCase().getModuleDirectory(), "spotlight")
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "spotlight")
    try:
        os.mkdir(moduleDirectory)
    except:
        pass
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # First pass: write out each store.db and convert it.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Save the file locally; use the file id in the name to reduce
        # collisions between identically named files.
        extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        self.processSpotlightFile(extractedFile, moduleDirectory)

    # Second pass: open the produced SQLite db and post artifacts.
    for file in files:
        lclDbPath = os.path.join(moduleDirectory, "spotlight_db.db3")
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            return IngestModule.ProcessResult.OK
        self.processSpotlightDb(dbConn, file)
        # FIX: the old code opened a new connection per file but only ever
        # closed the last one, leaking a connection/file handle for every
        # additional store.db.  Close each connection once it is consumed
        # (JDBC close() on an already-closed connection is a no-op, so the
        # cleanup below stays safe).
        dbConn.close()

    # Clean up the temp directory (and the last connection, already closed).
    try:
        dbConn.close()
        shutil.rmtree(temporaryDirectory)
    except:
        self.log(Level.INFO, "removal of spotlight database failed " + temporaryDirectory)

    # After all databases, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Spotlight Parser", " Spotlight Db Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse Windows Jump List ``*.automaticDestinations-ms`` files.

    Flow: create (or look up) the custom TSK_JL_AD artifact type and its
    attribute types, extract every matching file to a temp directory, run an
    external parser (self.path_to_exe) that writes a SQLite database
    (JL_AD.db3), then read that database back and post one TSK_JL_AD
    artifact per row.  Always returns IngestModule.ProcessResult.OK except
    on SQL errors, which abort early.
    """
    # Check to see if the artifacts exist and if not then create them; the
    # same pattern is used for every attribute type below.  NOTE(review):
    # all of these use bare ``except``, which also hides unrelated errors.
    skCase = Case.getCurrentCase().getSleuthkitCase();
    skCase_Tran = skCase.beginTransaction()
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_jl_ad = skCase.addArtifactType( "TSK_JL_AD", "Jump List Auto Dest")
    except:
        # Assume the artifact type already exists and fetch its id instead.
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD")

    # Create each custom attribute type; an exception normally means the
    # type already exists from a previous run and is logged then ignored.
    try:
        attID_jl_fn = skCase.addArtifactAttributeType("TSK_JLAD_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "JumpList File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, JL AD File Name. ==> ")
    try:
        attID_jl_fg = skCase.addArtifactAttributeType("TSK_JLAD_FILE_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Description")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Description. ==> ")
    try:
        attID_jl_in = skCase.addArtifactAttributeType("TSK_JLAD_ITEM_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Item Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Item Name. ==> ")
    try:
        attID_jl_cl = skCase.addArtifactAttributeType("TSK_JLAD_COMMAND_LINE_ARGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Command Line Args")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Command Line Arguments. ==> ")
    # NOTE(review): "TSK_JLAD_Drive Type" contains a SPACE, unlike every
    # other type name here.  The lookup below uses the same string so it is
    # self-consistent, but it is almost certainly an unintended typo.
    try:
        attID_jl_dt = skCase.addArtifactAttributeType("TSK_JLAD_Drive Type", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Type")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Drive Type. ==> ")
    try:
        attID_jl_dsn = skCase.addArtifactAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Serial Number")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Drive Serial Number. ==> ")
    try:
        attID_jl_des = skCase.addArtifactAttributeType("TSK_JLAD_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Description")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Description. ==> ")
    try:
        attID_jl_evl = skCase.addArtifactAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Env Var Location")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Env Var Location. ==> ")
    try:
        attID_jl_fat = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ACCESS_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Access Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Access Time. ==> ")
    try:
        attID_jl_faf = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Attribute Flags")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Attribute Flags. ==> ")
    try:
        attID_jl_fct = skCase.addArtifactAttributeType("TSK_JLAD_FILE_CREATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Creation Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Creation Time. ==> ")
    try:
        attID_jl_fmt = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Modification Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ")
    try:
        attID_jl_fs = skCase.addArtifactAttributeType("TSK_JLAD_FILE_SIZE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")
    try:
        attID_jl_ic = skCase.addArtifactAttributeType("TSK_JLAD_ICON_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Icon Location")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Icon Location. ==> ")
    try:
        attID_jl_ltid = skCase.addArtifactAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Link Target Identifier Data")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Link Target Identifier Data. ==> ")
    try:
        attID_jl_lp = skCase.addArtifactAttributeType("TSK_JLAD_LOCAL_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Local Path")
    except:
        # NOTE(review): this log message says "File Modification Time" but
        # the attribute being created is Local Path (copy/paste artifact).
        self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ")
    try:
        attID_jl_mi = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Machine Identifier")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Machine Identifier. ==> ")
    try:
        attID_jl_np = skCase.addArtifactAttributeType("TSK_JLAD_NETWORK_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Network Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Network Path. ==> ")
    try:
        attID_jl_rp = skCase.addArtifactAttributeType("TSK_JLAD_RELATIVE_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Relative Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Relative Path. ==> ")
    try:
        attID_jl_vl = skCase.addArtifactAttributeType("TSK_JLAD_VOLUME_LABEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Volume Label")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Volume Label. ==> ")
    try:
        attID_jl_wc = skCase.addArtifactAttributeType("TSK_JLAD_WORKING_DIRECTORY", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Working Directory")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Working Directory. ==> ")

    # Re-fetch the artifact/attribute types so we hold valid handles whether
    # they were just created or already existed.
    artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD")
    artID_jl_ad_evt = skCase.getArtifactType("TSK_JL_AD")
    attID_jl_fn = skCase.getAttributeType("TSK_JLAD_FILE_NAME")
    attID_jl_fg = skCase.getAttributeType("TSK_JLAD_FILE_DESCRIPTION")
    attID_jl_in = skCase.getAttributeType("TSK_JLAD_ITEM_NAME")
    attID_jl_cl = skCase.getAttributeType("TSK_JLAD_COMMAND_LINE_ARGS")
    attID_jl_dt = skCase.getAttributeType("TSK_JLAD_Drive Type")
    attID_jl_dsn = skCase.getAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER")
    attID_jl_des = skCase.getAttributeType("TSK_JLAD_DESCRIPTION")
    attID_jl_evl = skCase.getAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION")
    attID_jl_fat = skCase.getAttributeType("TSK_JLAD_FILE_ACCESS_TIME")
    attID_jl_faf = skCase.getAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS")
    attID_jl_fct = skCase.getAttributeType("TSK_JLAD_FILE_CREATION_TIME")
    attID_jl_fmt = skCase.getAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME")
    attID_jl_fs = skCase.getAttributeType("TSK_JLAD_FILE_SIZE")
    attID_jl_ic = skCase.getAttributeType("TSK_JLAD_ICON_LOCATION")
    attID_jl_ltid = skCase.getAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA")
    attID_jl_lp = skCase.getAttributeType("TSK_JLAD_LOCAL_PATH")
    attID_jl_mi = skCase.getAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER")
    attID_jl_np = skCase.getAttributeType("TSK_JLAD_NETWORK_PATH")
    attID_jl_rp = skCase.getAttributeType("TSK_JLAD_RELATIVE_PATH")
    attID_jl_vl = skCase.getAttributeType("TSK_JLAD_VOLUME_LABEL")
    attID_jl_wd = skCase.getAttributeType("TSK_JLAD_WORKING_DIRECTORY")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Find the automatic-destinations Jump List files.
    files = []
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create the JL_AD work directory under the case temp directory; if it
    # already exists just continue processing.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "JL_AD")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "JL_AD Directory already exists " + temp_dir)

    # Write out each Jump List file to the temp directory.
    # NOTE(review): the local name is file.getName() only, so two files with
    # the same name (e.g. from different users) would overwrite each other.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the external parser over the whole temp directory, saving its
    # output to a SQLite database (JL_AD.db3).
    self.log(Level.INFO, "Running program on data source parm 1 ==> " + temp_dir + " Parm 2 ==> " + Temp_Dir + "\JL_AD.db3")
    output = subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "JL_AD.db3"), self.path_to_app_id_db], stdout=subprocess.PIPE).communicate()[0]
    self.log(Level.INFO, " Return code is ==> " + output)

    # Open the database created by the parser via JDBC.
    lclDbPath = os.path.join(Temp_Dir, "JL_AD.db3")
    self.log(Level.INFO, "Path to the JL_AD database file created ==> " + lclDbPath)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Re-find the source files and, for each, pull its parsed rows out of
    # the database by matching on the file's base name.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms")
    for file in files:
        file_name = os.path.splitext(file.getName())[0]
        self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====")
        # Query the parsed rows for this file.
        # NOTE(review): file_name is concatenated directly into the SQL
        # string — if a file name contains a quote the query breaks
        # (injection/robustness risk); a PreparedStatement with a bind
        # parameter would be safer.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = "select File_Name, File_Description, Item_Name, command_line_arguments, drive_type, drive_serial_number, " + \
                            " description, environment_variables_location, file_access_time, file_attribute_flags, file_creation_time, " + \
                            " file_modification_time, file_size, icon_location, link_target_identifier_data, local_path, " + \
                            " machine_identifier, network_path, relative_path, volume_label, working_directory " + \
                            " from Automatic_destinations_JL where upper(File_Name) = upper('" + file_name + "');"
            resultSet = stmt.executeQuery(SQL_Statement)
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create one TSK_JL_AD artifact per row.
        while resultSet.next():
            try:
                File_Name = resultSet.getString("File_Name")
                File_Description = resultSet.getString("File_Description")
                Item_Name = resultSet.getString("Item_Name")
                Command_Line_Arguments = resultSet.getString("command_line_arguments")
                Drive_Type = resultSet.getInt("drive_type")
                Drive_Serial_Number = resultSet.getInt("drive_serial_number")
                Description = resultSet.getString("description")
                Environment_Variables_Location = resultSet.getString("environment_variables_location")
                File_Access_Time = resultSet.getString("file_access_time")
                File_Attribute_Flags = resultSet.getInt("file_attribute_flags")
                File_Creation_Time = resultSet.getString("file_creation_time")
                File_Modification_Time = resultSet.getString("file_modification_time")
                File_Size = resultSet.getInt("file_size")
                Icon_Location = resultSet.getString("icon_location")
                Link_Target_Identifier_Data = resultSet.getString("link_target_identifier_data")
                Local_Path = resultSet.getString("local_path")
                Machine_Identifier = resultSet.getString("machine_identifier")
                Network_Path = resultSet.getString("network_path")
                Relative_Path = resultSet.getString("relative_path")
                Volume_Label = resultSet.getString("volume_label")
                Working_Directory = resultSet.getString("working_directory")
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

            # Make an artifact on the blackboard and attach every column as
            # an attribute.
            art = file.newArtifact(artID_jl_ad)
            art.addAttributes(((BlackboardAttribute(attID_jl_fn, JumpListADDbIngestModuleFactory.moduleName, File_Name)), \
                               (BlackboardAttribute(attID_jl_fg, JumpListADDbIngestModuleFactory.moduleName, File_Description)), \
                               (BlackboardAttribute(attID_jl_in, JumpListADDbIngestModuleFactory.moduleName, Item_Name)), \
                               (BlackboardAttribute(attID_jl_cl, JumpListADDbIngestModuleFactory.moduleName, Command_Line_Arguments)), \
                               (BlackboardAttribute(attID_jl_dt, JumpListADDbIngestModuleFactory.moduleName, Drive_Type)), \
                               (BlackboardAttribute(attID_jl_dsn, JumpListADDbIngestModuleFactory.moduleName, Drive_Serial_Number)), \
                               (BlackboardAttribute(attID_jl_des, JumpListADDbIngestModuleFactory.moduleName, Description)), \
                               (BlackboardAttribute(attID_jl_evl, JumpListADDbIngestModuleFactory.moduleName, Environment_Variables_Location)), \
                               (BlackboardAttribute(attID_jl_fat, JumpListADDbIngestModuleFactory.moduleName, File_Access_Time)), \
                               (BlackboardAttribute(attID_jl_faf, JumpListADDbIngestModuleFactory.moduleName, File_Attribute_Flags)), \
                               (BlackboardAttribute(attID_jl_fct, JumpListADDbIngestModuleFactory.moduleName, File_Creation_Time)), \
                               (BlackboardAttribute(attID_jl_fmt, JumpListADDbIngestModuleFactory.moduleName, File_Modification_Time)), \
                               (BlackboardAttribute(attID_jl_fs, JumpListADDbIngestModuleFactory.moduleName, File_Size)), \
                               (BlackboardAttribute(attID_jl_ic, JumpListADDbIngestModuleFactory.moduleName, Icon_Location)), \
                               (BlackboardAttribute(attID_jl_ltid, JumpListADDbIngestModuleFactory.moduleName, Link_Target_Identifier_Data)), \
                               (BlackboardAttribute(attID_jl_lp, JumpListADDbIngestModuleFactory.moduleName, Local_Path)), \
                               (BlackboardAttribute(attID_jl_mi, JumpListADDbIngestModuleFactory.moduleName, Machine_Identifier)), \
                               (BlackboardAttribute(attID_jl_np, JumpListADDbIngestModuleFactory.moduleName, Network_Path)), \
                               (BlackboardAttribute(attID_jl_rp, JumpListADDbIngestModuleFactory.moduleName, Relative_Path)), \
                               (BlackboardAttribute(attID_jl_vl, JumpListADDbIngestModuleFactory.moduleName, Volume_Label)), \
                               (BlackboardAttribute(attID_jl_wd, JumpListADDbIngestModuleFactory.moduleName, Working_Directory))))

        # Fire an event to notify the UI and others that there are new artifacts.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None))

    # Clean up: commit the transaction, close JDBC handles, remove the
    # intermediate database and the extracted files.
    skCase_Tran.commit()
    stmt.close()
    dbConn.close()
    try:
        os.remove(lclDbPath)
    except:
        self.log(Level.INFO, "Failed to remove the file " + lclDbPath)

    # Clean up the JL_AD directory and files.
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of JL_AD file failed " + os.path.join(temp_dir, file.getName()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of JL_AD directory failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "JumpList AD", " JumpList AD Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None))

    return IngestModule.ProcessResult.OK
def _extractAndProcess(self, file, temporaryDirectory, processor):
    """Extract one abstract file to the temp dir, run processor over it,
    then delete the local copy.

    Slack files ("-slack" in the name) are never extracted or processed;
    only the removal of any leftover local copy is attempted (matching the
    previous per-loop behavior).

    Args:
        file: the abstract file from the file manager.
        temporaryDirectory: local directory to extract into.
        processor: callable(localPath, abstractFile) that parses the copy.
    """
    # Prefix the local name with the file id to reduce name collisions.
    extractedFile = os.path.join(temporaryDirectory,
                                 str(file.getId()) + "-" + file.getName())
    if "-slack" not in file.getName():
        ContentUtils.writeToFile(file, File(extractedFile))
        processor(extractedFile, file)
    try:
        os.remove(extractedFile)
    except:
        self.log(Level.INFO, "Failed to remove file " + extractedFile)

def process(self, dataSource, progressBar):
    """Parse Atomic Wallet artifacts: Connection.log* files and
    history.json, both found under an ".atomic" parent path.

    Args:
        dataSource: the data source being ingested.
        progressBar: ingest progress bar to update.

    Returns:
        IngestModule.ProcessResult.OK in all cases (errors are logged).
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    connectionFiles = fileManager.findFiles(dataSource, "Connection.log%", ".atomic")
    numFiles = len(connectionFiles)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create the Atomic Wallet temp directory; "already exists" is fine.
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Atomic_Wallet")
    try:
        os.mkdir(temporaryDirectory)
    except OSError:
        pass  # directory already exists

    # Get and process the connection logs.
    for file in connectionFiles:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        self._extractAndProcess(file, temporaryDirectory, self.processConnectionLogs)

    # Get and process the history file.
    historyFiles = fileManager.findFiles(dataSource, "history.json", ".atomic")
    numFiles = len(historyFiles)
    for file in historyFiles:
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        self._extractAndProcess(file, temporaryDirectory, self.processHistory)

    # Remove the temp extraction directory.
    try:
        shutil.rmtree(temporaryDirectory)
    except:
        self.log(Level.INFO, "removal of temporary directory failed " + temporaryDirectory)

    # Post a completion message to the ingest inbox.  (Fixed: the previous
    # message said "Facebook Chat", a copy-paste error — this module
    # analyzes Atomic Wallet data.)
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Atomic Wallet",
                                          " Atomic Wallet Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse Windows Timeline ActivitiesCache databases.

    Finds ActivitiesCache% files, creates custom attribute/artifact types
    from self.stringColumns / self.dateColumns, extracts each file, opens
    each .db copy over JDBC, reads the smartlookup table, and posts one
    TSK_ACTCACHE_DB artifact per row (tagged with the owning username taken
    from the file's parent path).
    """
    # we don't know how much work there is yet
    #progressBar.switchToIndeterminate()

    # Get the current case and the ActivitiesCache abstract file information.
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "ActivitiesCache%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    moduleName = ProcessActivitiesCacheIngestModuleFactory.moduleName

    # Create the ActivitiesCache temp directory; ignore "already exists".
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "ActivitiesCache")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # Map each parent path to the id of its .db file, so companion files
    # (e.g. -wal/-shm matched by the % wildcard) share the .db file's id
    # prefix when extracted.
    filePathId = {}
    for file in files:
        fileName = file.getName()
        if fileName.endswith(".db"):
            filePathId[file.getParentPath()] = file.getId()
            self.log(Level.INFO, "file path and id ==> " + str(file.getParentPath()) + " <> " + str(file.getId()) + " <> " + str(fileName))

    # Create the custom attribute and artifact types; failures (usually
    # "already exists") are logged and ignored.
    # NOTE(review): self.stringColumns / self.dateColumns are defined
    # elsewhere in the class — assumed to be lists of (typeName, display)
    # pairs; confirm against the class definition.
    if numFiles > 0:
        for artifact in self.stringColumns:
            try:
                attID = skCase.addArtifactAttributeType(artifact[0], BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, artifact[1])
            except:
                self.log(Level.INFO, "Attributes Creation Error, " + artifact[0] + " ==> ")
        for artifact in self.dateColumns:
            try:
                attID = skCase.addArtifactAttributeType(artifact[0], BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, artifact[1])
            except:
                self.log(Level.INFO, "Attributes Creation Error, " + artifact[0] + " ==> ")
        try:
            artID_art = skCase.addArtifactType("TSK_ACTCACHE_DB", "Activities Cache Timeline DB")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, artifact TSK_ACTCACHE_DB exists. ==> ")

    # Write out each ActivitiesCache file and process the .db copies.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1

        # Save the file locally, prefixed with the .db file's id for this
        # parent path to reduce collisions.
        fileId = filePathId[file.getParentPath()]
        extractedFile = os.path.join(temporaryDirectory, str(fileId) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))

        # The username is taken from the parent path, e.g. /Users/<name>/...
        # NOTE(review): username[2] assumes a fixed path depth — verify
        # against the data sources this module targets.
        userpath = file.getParentPath()
        username = userpath.split('/')

        fileName = file.getName()
        if fileName.endswith(".db"):
            extractedFile = os.path.join(temporaryDirectory, str(filePathId[file.getParentPath()]) + "-" + file.getName())
            artActCacheId = skCase.getArtifactTypeID("TSK_ACTCACHE_DB")
            self.log(Level.INFO, "Artifact id ==> " + str(artActCacheId))
            artActCache = skCase.getArtifactType("TSK_ACTCACHE_DB")
            moduleName = ProcessActivitiesCacheIngestModuleFactory.moduleName

            # Open the extracted SQLite database over JDBC.
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extractedFile)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedFile + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # The SQL aliases each column to the matching custom attribute
            # type name, so column labels double as attribute lookups below.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("select hex(id) TSK_ACTCACHE_ID, appId TSK_ACTCACHE_APP_ID, " + \
                                              " cast(Payload as Text) TSK_ACTCACHE_PAYLOAD, " + \
                                              " ActivityType TSK_ACTCACHE_ACT_TYPE, ActivityStatus TSK_ACTCACHE_STATUS, " + \
                                              " startTime TSK_ACTCACHE_ST_TIME, EndTime TSK_ACTCACHE_ENDTIME, " + \
                                              " LastModifiedTime TSK_ACTCACHE_LAST_MOD, ExpirationTime TSK_ACTCACHE_EXP_TIME, " + \
                                              " createdInCloud TSK_ACTCACHE_CRT_CLOUD, " + \
                                              " LastModifiedOnClient TSK_ACTCACHE_LAST_MOD_CLIENT, " + \
                                              " OriginalLastModifiedOnClient TSK_ACTCACHE_ORIG_LMOC, " + \
                                              " isLocalOnly TSK_ACTCACHE_LOCAL_ONLY, Etag TSK_ACTCACHE_ETAG, " + \
                                              " packageIdHash TSK_ACTCACHE_PKGID_HASH, " + \
                                              " PlatformDeviceId TSK_ACTCACHE_PLAT_DEVID from smartlookup")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for smartlookup tables (" + e.getMessage() + ") ")
                return IngestModule.ProcessResult.OK

            # Collect the column labels (= attribute type names) once.
            meta = resultSet.getMetaData()
            columnCount = meta.getColumnCount()
            columnNames = []
            self.log(Level.INFO, "Number of Columns in the table ==> " + str(columnCount))
            for x in range (1, columnCount + 1):
                columnNames.append(meta.getColumnLabel(x))

            # Cycle through each row and create one artifact per row.
            self.log(Level.INFO, "Start PRocessing")
            while resultSet.next():
                try:
                    artifact = file.newArtifact(artActCacheId)
                    attributes = ArrayList()
                    attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), moduleName, username[2]))
                    for x in range(0, columnCount):
                        # NOTE(review): this reads self.dateColumn
                        # (singular) while the creation loops above used
                        # self.dateColumns (plural) — if only the plural
                        # exists this raises AttributeError, which is
                        # swallowed by the outer except. Confirm.
                        if columnNames[x] in self.dateColumn:
                            attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSet.getInt(columnNames[x])))
                        else:
                            # NOTE(review): both branches below are
                            # identical — the TSK_ACTCACHE_ID special case
                            # is a no-op.
                            if columnNames[x] == "TSK_ACTCACHE_ID":
                                attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSet.getString(columnNames[x])))
                            else:
                                attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSet.getString(columnNames[x])))
                    artifact.addAttributes(attributes)
                    # index the artifact for keyword search
                    # NOTE(review): `blackboard` is not defined in this
                    # method — this likely raises NameError on every row,
                    # silently swallowed by the bare except below.
                    try:
                        blackboard.indexArtifact(artifact)
                    except:
                        pass
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from smartlookup table (" + e.getMessage() + ")")

            # Close the database statement and connection.
            try:
                stmt.close()
                dbConn.close()
            except:
                pass

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "ActivitiesCache", " ActivitiesCache's Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Decrypt and post QBee camera / Swisscom Home App settings files.

    For each com.vestiacom.qbeecamera_preferences.xml and
    com.swisscom.internetbox_preferences.xml file (only when the module's
    parse-settings option is enabled): extract the XML, run the external
    decryptor (self.path_to_exe) which writes a JSON file, register that
    JSON as a derived file, and post interesting-file (and, for QBee,
    ESC_GENERIC_LOGIN credential) artifacts.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the case database.
    case = Case.getCurrentCase().getSleuthkitCase()

    # Find the two Android shared-preferences files; skip entirely when the
    # parse-settings option is off.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    swisscom_settings_file = fileManager.findFiles(dataSource, "com.swisscom.internetbox_preferences.xml") if self.local_settings.get_parse_settings() else []
    qbee_settings_file = fileManager.findFiles(dataSource, "com.vestiacom.qbeecamera_preferences.xml") if self.local_settings.get_parse_settings() else []
    num_files = len(qbee_settings_file) + len(swisscom_settings_file)
    self.log(Level.INFO, "found " + str(num_files) + " files")
    progressBar.switchToDeterminate(num_files)
    file_count = 0

    # Work directories: temp for extracted XML, module dir for decrypted JSON.
    tmp_dir = Case.getCurrentCase().getTempDirectory()
    out_dir = Case.getCurrentCase().getModuleDirectory()

    if self.local_settings.get_parse_settings():
        # Settings file for the QBee app.
        for file in qbee_settings_file:
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            self.log(Level.INFO, "Processing file: " + file.getName())
            file_count += 1

            # Extract the XML locally (named by file id) and run the
            # external decryptor, which writes the decrypted JSON.
            lcl_setting_path = os.path.join(tmp_dir, str(file.getId()) + ".xml")
            ContentUtils.writeToFile(file, File(lcl_setting_path))
            out_file_name = file.getName() + "_decrypted.json"
            dest_json_path = os.path.join(out_dir, out_file_name)
            pipe = Popen([self.path_to_exe, lcl_setting_path, dest_json_path], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            # Load the decrypted settings; the decryptor emits a
            # 'decrypted_settings' list of candidates and we take the first.
            with open(dest_json_path) as json_file:
                settings_clear = json.load(json_file)
            self.log(Level.INFO, "Settings: " + str(settings_clear))
            try:
                settings_clear = settings_clear['decrypted_settings'][0]
            except KeyError:
                # Unexpected schema: abort the whole module run.
                raise KeyError("JSON File format unknown")
            except IndexError:
                # Decryption produced no candidates: skip this file.
                self.log(Level.INFO, "Error: No AES key candidates found while decrypting settings file.")
                continue

            # Register the decrypted JSON as a derived file of the XML.
            json_info = os.stat(dest_json_path)
            print json_info.st_mtime  # debug output (Jython/Python 2 print statement)
            encoding = TskData.EncodingType.valueOf(0)
            dest_json_rel_path = os.path.relpath(dest_json_path, Case.getCurrentCase().getCaseDirectory())
            json_file = fileManager.addDerivedFile(out_file_name, dest_json_rel_path, int(json_info.st_size),
                                                   int(json_info.st_ctime), int(json_info.st_ctime),
                                                   int(json_info.st_atime), int(json_info.st_mtime),
                                                   True, file, "QBee Module", "Qbee Module", "0.1", "", encoding)

            # Custom login artifact type — assumed created elsewhere in the
            # module (TODO confirm), since it is only looked up here.
            art_type_id = case.getArtifactTypeID("ESC_GENERIC_LOGIN")
            art_type = case.getArtifactType("ESC_GENERIC_LOGIN")

            # Mark the derived JSON as an interesting file.
            art = json_file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, QBeeIngestModuleFactory.moduleName, "QBee")
            art.addAttribute(att)

            # Post the recovered QBee credentials as a login artifact on
            # the original XML file.
            art = file.newArtifact(art_type_id)
            att_login_username_id = case.getAttributeType("ESC_GENERIC_LOGIN_USERNAME")
            att_login_secret_id = case.getAttributeType("ESC_GENERIC_LOGIN_SECRET")
            att_login_secret_type_id = case.getAttributeType("ESC_GENERIC_LOGIN_SECRET_TYPE")
            att_login_service_id = case.getAttributeType("ESC_GENERIC_LOGIN_SERVICE")
            att_login_username = BlackboardAttribute(att_login_username_id, QBeeIngestModuleFactory.moduleName, settings_clear['qbeeUser'])
            att_login_secret = BlackboardAttribute(att_login_secret_id, QBeeIngestModuleFactory.moduleName, settings_clear['qbeePassword'])
            att_login_secret_type = BlackboardAttribute(att_login_secret_type_id, QBeeIngestModuleFactory.moduleName, "Password")
            att_login_service = BlackboardAttribute(att_login_service_id, QBeeIngestModuleFactory.moduleName, "QBee")
            art.addAttribute(att_login_username)
            art.addAttribute(att_login_secret)
            art.addAttribute(att_login_secret_type)
            art.addAttribute(att_login_service)
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(QBeeIngestModuleFactory.moduleName, art_type, None))
            progressBar.progress(file_count)

        # Settings file for the Swisscom Home App.  NOTE(review): this loop
        # duplicates the extraction/decryption steps above but posts only an
        # interesting-file artifact (no credential artifact).
        for file in swisscom_settings_file:
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            self.log(Level.INFO, "Processing file: " + file.getName())
            file_count += 1

            # Extract the XML locally and run the external decryptor.
            lcl_setting_path = os.path.join(tmp_dir, str(file.getId()) + ".xml")
            ContentUtils.writeToFile(file, File(lcl_setting_path))
            out_file_name = file.getName() + "_decrypted.json"
            dest_json_path = os.path.join(out_dir, out_file_name)
            pipe = Popen([self.path_to_exe, lcl_setting_path, dest_json_path], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            with open(dest_json_path) as json_file:
                settings_clear = json.load(json_file)
            self.log(Level.INFO, "Settings: " + str(settings_clear))
            try:
                settings_clear = settings_clear['decrypted_settings'][0]
            except KeyError:
                raise KeyError("JSON File format unknown")
            except IndexError:
                self.log(Level.INFO, "Error: No AES key candidates found while decrypting settings file.")
                continue

            # Register the decrypted JSON as a derived file of the XML.
            json_info = os.stat(dest_json_path)
            print json_info.st_mtime  # debug output (Jython/Python 2 print statement)
            encoding = TskData.EncodingType.valueOf(0)
            dest_json_rel_path = os.path.relpath(dest_json_path, Case.getCurrentCase().getCaseDirectory())
            json_file = fileManager.addDerivedFile(out_file_name, dest_json_rel_path, int(json_info.st_size),
                                                   int(json_info.st_ctime), int(json_info.st_ctime),
                                                   int(json_info.st_atime), int(json_info.st_mtime),
                                                   True, file, "QBee Module", "Qbee Module", "0.1", "", encoding)

            # Mark the derived JSON as an interesting file.
            art = json_file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, QBeeIngestModuleFactory.moduleName, "Swisscom Home App")
            art.addAttribute(att)
            progressBar.progress(file_count)

    # FINISHED! Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "QBee Analysis", "Found %d files" % file_count)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Shimcache parser module.

    For every SYSTEM registry hive found under a "config" folder, this:
      1. extracts the hive to a case-temp "Shimcache" directory,
      2. runs an external shimcache parser executable (self.path_to_exe)
         that writes its results into a SQLite database,
      3. walks every table of that database via JDBC and turns each row
         into a TSK_SHIMCACHE blackboard artifact (one custom attribute
         per column), then
      4. cleans up the temp files and posts an ingest inbox message.

    NOTE(review): the error paths below `return IngestModule.ProcessResult.OK`
    from inside the per-file loop, which silently aborts processing of any
    remaining hives — confirm this is intentional.
    """

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Case handles for artifact creation and file lookup.
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Locate SYSTEM hives inside a "config" directory (the registry folder).
    files = fileManager.findFiles(dataSource, "SYSTEM", "config")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create a "Shimcache" working directory under the case temp directory;
    # if it already exists (e.g. a re-run) just continue on processing.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "Shimcache")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Shimcache Directory already exists " + Temp_Dir)

    for file in files:

        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        fileCount += 1

        # Save the hive locally in the temp folder.
        # NOTE(review): only the bare file name is used here, so two SYSTEM
        # hives with identical names would overwrite each other; using the
        # file id in the name would avoid collisions — confirm.
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))
        self.log(Level.INFO, "Saved File ==> " + lclDbPath)

        # Run the external parser EXE, saving its output to a SQLite database.
        # NOTE(review): every hive appends into the same Shimcache_db.db3.
        self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "//Shimcache//" + \
                 file.getName() + " " + Temp_Dir + "//Shimcache_db.db3")
        pipe = Popen([self.path_to_exe, os.path.join(temp_dir, file.getName()), os.path.join(temp_dir, "Shimcache_db.db3")], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Open the parser's output database using JDBC.
        lclDbPath = os.path.join(temp_dir, "Shimcache_db.db3")
        self.log(Level.INFO, "Path the system database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # List every table in the database so each can be dumped generically.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER; ")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for system table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Create the custom artifact type; if it already exists the add call
        # throws and we fall back to looking up the existing type id.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_shim = skCase.addArtifactType("TSK_SHIMCACHE", "Shimcache")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_shim = skCase.getArtifactTypeID("TSK_SHIMCACHE")
        artID_shim_evt = skCase.getArtifactType("TSK_SHIMCACHE")

        # Cycle through each table and create one artifact per data row.
        while resultSet.next():
            try:
                self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                table_name = resultSet.getString("tbl_name")
                SQL_String_1 = "Select * from " + table_name + ";"
                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                Column_Names = []
                Column_Types = []
                # First pass: discover the columns and (idempotently) create a
                # matching TSK_SHIMCACHE_<COLUMN> attribute type for each —
                # STRING for TEXT columns, DATETIME for everything else.
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type"))
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_SHIMCACHE_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_SHIMCACHE_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # Second pass: dump the rows, adding one attribute per column.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_shim)
                    Column_Number = 1
                    for col_name in Column_Names:
                        c_name = "TSK_SHIMCACHE_" + col_name
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseShimcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        else:
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseShimcacheIngestModuleFactory.moduleName, resultSet3.getInt(Column_Number)))
                        Column_Number = Column_Number + 1
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from Shimcache table (" + e.getMessage() + ")")

        # Clean up the JDBC resources for this hive's output database.
        stmt.close()
        dbConn.close()

        # Fire an event to notify the UI and others that there are new artifacts.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseShimcacheIngestModuleFactory.moduleName, artID_shim_evt, None))

        # Remove the per-hive output database; failure is only logged.
        try:
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "removal of Shimcache tempdb failed " + lclDbPath)

    # Remove each extracted hive, then the working directory itself.
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of Shimcache file failed " + Temp_Dir + "\\" + file.getName())
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Shimcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Shimcache Parser", " Shimcache Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the YPA (Your Phone Analyzer) module.

    For each "phone.db" database found in the data source, this:
      1. extracts it to self.temp_dir and opens it via the SQLite JDBC
         driver (UTF-8 encoding forced through SQLiteConfig),
      2. creates per-user custom artifact types (contacts, SMS, MMS,
         recent pictures, recovered rows, DB settings),
      3. runs the contact/message/MMS queries and posts their artifacts,
      4. records the database's "pragma user_version",
      5. on Windows only, runs the external "undark" tool and the mdgMod
         delete-parser to recover deleted rows, and
      6. looks for a sibling ".../User/<name>/Recent Photos/" folder and
         posts a picture artifact (with size) for each file found there.

    Each database is closed and its temp copy removed in a finally block.
    Finally, if no database was valid, an informational message is shown.
    """
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "phone.db")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    # Tracks whether at least one phone.db was successfully processed.
    self.anyValidFileFound = False
    for file in files:
        # Extract the database to the module temp directory.
        dbPath = os.path.join(self.temp_dir, str(file.getName()))
        ContentUtils.writeToFile(file, File(dbPath))
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            config = SQLiteConfig()
            config.setEncoding(SQLiteConfig.Encoding.UTF8)
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % dbPath, config.toProperties())
        except Exception as e:
            # Not a readable SQLite file — skip this candidate.
            self.log(
                Level.INFO,
                "Could not open database file (not SQLite) " + file.getName() + " (" + str(e) + ")")
            continue
        try:
            # Derive the Windows user name from the file's path.
            # NOTE(review): split[-11] assumes a fixed path depth for the
            # Your Phone app package layout — confirm against real images.
            full_path = (file.getParentPath() + file.getName())
            split = full_path.split('/')
            try:
                try:
                    userName = split[-11]
                except IndexError:
                    userName = "******"
                # One set of per-user artifact types, named after the user.
                self.art_contacts = self.create_artifact_type(
                    "YPA_CONTACTS_" + userName, "User " + userName + " - Contacts", skCase)
                self.art_messages = self.create_artifact_type(
                    "YPA_MESSAGE_" + userName, "User " + userName + " - SMS", skCase)
                self.art_mms = self.create_artifact_type(
                    "YPA_MMS_" + userName, "User " + userName + " - MMS", skCase)
                self.art_pictures = self.create_artifact_type(
                    "YPA_PICTURES_" + userName, "User " + userName + " - Recent Pictures", skCase)
                self.art_freespace = self.create_artifact_type(
                    "YPA_FREESPACE_" + userName, "User " + userName + " - Rows Recovered(undark)", skCase)
                self.art_dp = self.create_artifact_type(
                    "YPA_DP_" + userName, "User " + userName + " - Rows Recovered(Delete parser)", skCase)
                self.art_set = self.create_artifact_type(
                    "YPA_SETTINGS_" + userName, "User " + userName + " - Database Settings", skCase)
            except Exception as e:
                self.log(Level.INFO, str(e))
                continue

            # Contacts, SMS and MMS extraction (queries defined on self).
            stmt = dbConn.createStatement()
            contacts = stmt.executeQuery(self.contact_query)
            self.processContacts(contacts, file, blackboard, skCase)
            stmt = dbConn.createStatement()
            messages = stmt.executeQuery(self.messages_query)
            self.processMessages(messages, file, blackboard, skCase)
            stmt = dbConn.createStatement()
            mms = stmt.executeQuery(self.mms_query)
            self.processMms(mms, file, blackboard, skCase)
            self.anyValidFileFound = True

            # Record the SQLite schema user_version as a settings artifact.
            stmt = dbConn.createStatement()
            prag_uv = stmt.executeQuery("pragma user_version")
            art = file.newArtifact(self.art_set.getTypeID())
            prag_uv.next()
            art.addAttribute(
                BlackboardAttribute(
                    self.att_db_uv, YourPhoneIngestModuleFactory.moduleName,
                    prag_uv.getString("user_version")))
            self.index_artifact(blackboard, art, self.art_set)

            # Deleted-row recovery tools are only shipped for Windows.
            if PlatformUtil.isWindowsOS():
                try:
                    # Run undark over the db, capturing its stdout to a file,
                    # then post one artifact per recovered line.
                    with open(self.temp_dir + '\\freespace.txt', 'w') as f:
                        subprocess.Popen([
                            self.path_to_undark, '-i', dbPath, '--freespace'
                        ], stdout=f).communicate()
                    with open(self.temp_dir + '\\freespace.txt', 'r') as f:
                        self.log(
                            Level.INFO,
                            ' '.join([
                                self.path_to_undark, '-i', dbPath, '--freespace >'
                            ]))
                        self.log(Level.INFO, "called undark")
                        line = f.readline()
                        while line:
                            self.log(Level.INFO, "opened result")
                            art = file.newArtifact(
                                self.art_freespace.getTypeID())
                            art.addAttribute(
                                BlackboardAttribute(
                                    self.att_rec_row, YourPhoneIngestModuleFactory.moduleName,
                                    str(line)))
                            self.index_artifact(blackboard, art, self.art_freespace)
                            line = f.readline()
                except Exception as e:
                    self.log(Level.SEVERE, str(e))
                    pass
                try:
                    # mdgMod delete-parser: each result is a
                    # (type, offset, length, data) tuple.
                    mdg = mdgMod.mdg_modified.sqlite_rec(dbPath)
                    res = mdg.extract_deleted()
                    for line in res:
                        art = file.newArtifact(self.art_dp.getTypeID())
                        art.addAttribute(
                            BlackboardAttribute(
                                self.att_dp_type,
                                YourPhoneIngestModuleFactory.moduleName,
                                str(line[0])))
                        art.addAttribute(
                            BlackboardAttribute(
                                self.att_dp_offset,
                                YourPhoneIngestModuleFactory.moduleName,
                                str(line[1])))
                        art.addAttribute(
                            BlackboardAttribute(
                                self.att_dp_lenght,
                                YourPhoneIngestModuleFactory.moduleName,
                                str(line[2])))
                        art.addAttribute(
                            BlackboardAttribute(
                                self.att_dp_data,
                                YourPhoneIngestModuleFactory.moduleName,
                                str(line[3])))
                        self.index_artifact(blackboard, art, self.art_dp)
                except Exception as e:
                    self.log(Level.SEVERE, str(e))
                    pass
        except Exception as e:
            self.log(Level.SEVERE, str(e))
            continue
        finally:
            # Always close the connection and delete the temp copy.
            dbConn.close()
            try:
                os.remove(dbPath)
            except (Exception, OSError) as e:
                self.log(Level.SEVERE, str(e))

        # Recent photos: walk up to the app GUID folder, then into each
        # user's "Recent Photos" directory and post one artifact per file.
        try:
            full_path = (file.getParentPath() + file.getName())
            split = full_path.split('/')
            guidPath = '/'.join(split[:-3])
            usrPath = guidPath + '/User'
            self.log(Level.INFO, usrPath)
            ufiles = fileManager.findFiles(dataSource, '%', usrPath)
            self.log(Level.INFO, ufiles[0].getName())
            for ufile in ufiles:
                rpPath = ufile.getParentPath() + ufile.getName(
                ) + '/Recent Photos/'
                picfiles = fileManager.findFiles(dataSource, '%', rpPath)
                for pic in picfiles:
                    self.log(Level.INFO, pic.getName())
                    # Make an artifact recording the picture's file size.
                    art = pic.newArtifact(self.art_pictures.getTypeID())
                    art.addAttribute(
                        BlackboardAttribute(
                            self.att_pic_size,
                            YourPhoneIngestModuleFactory.moduleName,
                            pic.getSize()))
                    self.index_artifact(blackboard, art, self.art_pictures)
        except Exception as e:
            self.log(Level.INFO, "failed to obtain photos")
            continue

    if not self.anyValidFileFound:
        Message.info("YPA: No valid database file found")
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Windows File History parser.

    Creates (or looks up) the TSK_FH_CATALOG_1/2 artifact types and the
    TSK_FH_* attribute types, then for every "%edb" catalog database found
    under "%/Windows/FileHistory/%":
      1. extracts it to a "File_History" case-temp directory,
      2. converts it to SQLite with an external executable
         (self.path_to_exe; an extra script-dir argument is passed on
         non-Windows platforms),
      3. reads the file_history table over JDBC and posts one artifact
         per row (catalog 1 vs 2 chosen by the source file's base name),
         tagging each with the user name taken from the file's path.

    The temp directory is removed at the end and an ingest inbox message
    is posted.  NOTE(review): DB-open/query failures return early from
    inside the loop, abandoning the remaining catalogs — confirm intended.
    """

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Check to see if the artifacts exist and if not then create them,
    # likewise for the attribute types below.
    skCase = Case.getCurrentCase().getSleuthkitCase();
    # This will work in 4.0.1 and beyond: use the blackboard class to index
    # blackboard artifacts for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_cat1 = skCase.addArtifactType(
            "TSK_FH_CATALOG_1", "File History Catalog 1")
    except:
        # Type already exists — fall back to the existing id.
        self.log(Level.INFO, "Artifacts Creation Error, Catalog 1. ==> ")
        artID_cat1 = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_cat2 = skCase.addArtifactType(
            "TSK_FH_CATALOG_2", "File History Catalog 2")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, Catalog 2. ==> ")
        artID_cat2 = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")

    # Create each attribute type; if it already exists the add call throws
    # and the error is simply logged (the type is fetched again below).
    try:
        attID_fh_pn = skCase.addArtifactAttributeType('TSK_FH_PATH', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Parent Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Prefetch Parent Path. ==> ")
    try:
        attID_fh_fn = skCase.addArtifactAttributeType('TSK_FH_FILE_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fh_fs = skCase.addArtifactAttributeType('TSK_FH_FILE_SIZE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")
    try:
        attID_fh_usn = skCase.addArtifactAttributeType('TSK_FH_USN_JOURNAL_ENTRY', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "USN Journal Entry")
    except:
        self.log(Level.INFO, "Attributes Creation Error, USN Journal Entry. ==> ")
    try:
        attID_fh_fc = skCase.addArtifactAttributeType('TSK_FH_FILE_CREATED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Created")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Created. ==> ")
    try:
        attID_fh_fm = skCase.addArtifactAttributeType('TSK_FH_FILE_MODIFIED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Modified")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 3. ==> ")
    try:
        attID_fh_bq = skCase.addArtifactAttributeType('TSK_FH_BACKUP_QUEUED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Queued")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Queued ==> ")
    try:
        attID_fh_bc = skCase.addArtifactAttributeType('TSK_FH_BACKUP_CREATED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Created")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Created ==> ")
    try:
        attID_fh_bcp = skCase.addArtifactAttributeType('TSK_FH_BACKUP_CAPTURED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Captured")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Captured. ==> ")
    try:
        attID_fh_bu = skCase.addArtifactAttributeType('TSK_FH_BACKUP_UPDATED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Updated")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Updated. ==> ")
    try:
        attID_fh_bv = skCase.addArtifactAttributeType('TSK_FH_BACKUP_VISIBLE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Visible")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Visible ==> ")

    self.log(Level.INFO, "Get Artifacts after they were created.")
    # Get the attribute types that were just created (or already existed).
    attID_fh_pn = skCase.getAttributeType("TSK_FH_PATH")
    attID_fh_fn = skCase.getAttributeType("TSK_FH_FILE_NAME")
    attID_fh_fs = skCase.getAttributeType("TSK_FH_FILE_SIZE")
    attID_fh_usn = skCase.getAttributeType("TSK_FH_USN_JOURNAL_ENTRY")
    attID_fh_fc = skCase.getAttributeType("TSK_FH_FILE_CREATED")
    attID_fh_fm = skCase.getAttributeType("TSK_FH_FILE_MODIFIED")
    attID_fh_bq = skCase.getAttributeType("TSK_FH_BACKUP_QUEUED")
    attID_fh_bc = skCase.getAttributeType("TSK_FH_BACKUP_CREATED")
    attID_fh_bcp = skCase.getAttributeType("TSK_FH_BACKUP_CAPTURED")
    attID_fh_bu = skCase.getAttributeType("TSK_FH_BACKUP_UPDATED")
    attID_fh_bv = skCase.getAttributeType("TSK_FH_BACKUP_VISIBLE")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Find the file history catalog files from the users folders.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%edb", "%/Windows/FileHistory/%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create a File_History directory in the case temp directory; if it
    # already exists just continue on processing.
    Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "File_History")
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir)
    except:
        self.log(Level.INFO, "File_History Directory already exists " + Temp_Dir)

    # Write out each catalog esedb database to the temp directory.
    for file in files:

        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        fileCount += 1

        # Save the DB locally in the temp folder; the file id is appended
        # to the name to reduce collisions between identically-named catalogs.
        lclDbPath = os.path.join(Temp_Dir, file.getName() + "_" + str(file.getId()))
        db_name = os.path.splitext(file.getName())[0]
        lclSQLPath = os.path.join(Temp_Dir, db_name + "_" + str(file.getId()) + ".db3")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the converter EXE, saving output to a sqlite database.  On
        # non-Windows the script directory is passed as an extra argument.
        if PlatformUtil.isWindowsOS():
            self.log(Level.INFO, "Running program on data source parm 1 ==> " + self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([self.path_to_exe, lclDbPath, lclSQLPath], stdout=PIPE, stderr=PIPE)
        else:
            self.log(Level.INFO, "Running program on data source parm 1 ==> " + self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([self.path_to_exe, lclDbPath, lclSQLPath, os.path.dirname(os.path.abspath(__file__))], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Catalog1 and Catalog2 rows go to their own artifact types.
        if db_name == "Catalog1":
            artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
            artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_1")
        else:
            artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")
            artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_2")

        # The user name is the third component of the catalog's parent path.
        # NOTE(review): assumes a ".../Users/<name>/..." layout — confirm.
        userpath = file.getParentPath()
        username = userpath.split('/')
        self.log(Level.INFO, "Getting Username " + username[2] )

        # Open the converted DB using JDBC.
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclSQLPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + lclSQLPath + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the file_history table, aliasing each column to its
        # corresponding TSK_FH_* attribute name.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = "Select ParentName 'TSK_FH_PATH', Childname 'TSK_FH_FILE_NAME', " + \
                            "Filesize 'TSK_FH_FILE_SIZE', " + \
                            "usn 'TSK_FH_USN_JOURNAL_ENTRY', " + \
                            "FileCreated 'TSK_FH_FILE_CREATED', filemodified 'TSK_FH_FILE_MODIFIED', " + \
                            "tqueued 'TSK_FH_BACKUP_QUEUED', tcreated 'TSK_FH_BACKUP_CREATED', " + \
                            "tcaptured 'TSK_FH_BACKUP_CAPTURED', tupdated 'TSK_FH_BACKUP_UPDATED', " + \
                            "tvisible 'TSK_FH_BACKUP_VISIBLE' from file_history"
            self.log(Level.INFO, "SQL Statement --> " + SQL_Statement)
            resultSet = stmt.executeQuery(SQL_Statement)
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for File_History table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts.
        while resultSet.next():
            try:
                FH_Path = resultSet.getString("TSK_FH_PATH")
                FH_File_Name = resultSet.getString("TSK_FH_FILE_NAME")
                FH_Filesize = resultSet.getString("TSK_FH_FILE_SIZE")
                FH_Usn = resultSet.getString("TSK_FH_USN_JOURNAL_ENTRY")
                FH_FC = resultSet.getInt("TSK_FH_FILE_CREATED")
                FH_FM = resultSet.getInt("TSK_FH_FILE_MODIFIED")
                FH_BQ = resultSet.getInt("TSK_FH_BACKUP_QUEUED")
                FH_BC = resultSet.getInt("TSK_FH_BACKUP_CREATED")
                FH_BCP = resultSet.getInt("TSK_FH_BACKUP_CAPTURED")
                FH_BU = resultSet.getInt("TSK_FH_BACKUP_UPDATED")
                FH_BV = resultSet.getInt("TSK_FH_BACKUP_VISIBLE")
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

            # Make the artifact and attach all attributes in one call.
            art = file.newArtifact(artID_fh)
            art.addAttributes(((BlackboardAttribute(attID_fh_pn, ParseFileHistoryIngestModuleFactory.moduleName, FH_Path)), \
                               (BlackboardAttribute(attID_fh_fn, ParseFileHistoryIngestModuleFactory.moduleName, FH_File_Name)), \
                               (BlackboardAttribute(attID_fh_fs, ParseFileHistoryIngestModuleFactory.moduleName, FH_Filesize)), \
                               (BlackboardAttribute(attID_fh_usn, ParseFileHistoryIngestModuleFactory.moduleName, FH_Usn)), \
                               (BlackboardAttribute(attID_fh_fc, ParseFileHistoryIngestModuleFactory.moduleName, FH_FC)), \
                               (BlackboardAttribute(attID_fh_fm, ParseFileHistoryIngestModuleFactory.moduleName, FH_FM)), \
                               (BlackboardAttribute(attID_fh_bq, ParseFileHistoryIngestModuleFactory.moduleName, FH_BQ)), \
                               (BlackboardAttribute(attID_fh_bc, ParseFileHistoryIngestModuleFactory.moduleName, FH_BC)), \
                               (BlackboardAttribute(attID_fh_bcp, ParseFileHistoryIngestModuleFactory.moduleName, FH_BCP)), \
                               (BlackboardAttribute(attID_fh_bu, ParseFileHistoryIngestModuleFactory.moduleName, FH_BU)), \
                               (BlackboardAttribute(attID_fh_bv, ParseFileHistoryIngestModuleFactory.moduleName, FH_BV)), \
                               (BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), \
                                ParseFileHistoryIngestModuleFactory.moduleName, username[2]))))
            try:
                # Index the artifact for keyword search.
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Notify the UI and others that there are new artifacts.
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseFileHistoryIngestModuleFactory.moduleName, artID_fh_evt, None))

        # Clean up the JDBC resources for this catalog.
        stmt.close()
        dbConn.close()

    # Clean up the extraction directory and files.
    try:
        shutil.rmtree(Temp_Dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Windows File History Parser", " Windows File History Has Been Parsed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar): #Check to see if event logs were selected, if not then send message and error out else process events selected self.log(Level.INFO, "List Of Events ==> " + str(self.List_Of_Events) + " <== Number of Events ==> " + str(len(self.List_Of_Events))) if len(self.List_Of_Events) < 1: message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " No Event Logs Selected to Parse " ) IngestServices.getInstance().postMessage(message) return IngestModule.ProcessResult.ERROR else: # Check to see if the artifacts exist and if not then create it, also check to see if the attributes # exist and if not then create them skCase = Case.getCurrentCase().getSleuthkitCase(); skCase_Tran = skCase.beginTransaction() try: self.log(Level.INFO, "Begin Create New Artifacts") artID_evtx = skCase.addArtifactType( "TSK_EVTX_LOGS", "Windows Event Logs") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS") try: self.log(Level.INFO, "Begin Create New Artifacts") artID_evtx_Long = skCase.addArtifactType( "TSK_EVTX_LOGS_BY_ID", "Windows Event Logs By Event Id") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID") try: attID_ev_fn = skCase.addArtifactAttributeType("TSK_EVTX_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Log File Name") except: self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ") try: attID_ev_rc = skCase.addArtifactAttributeType("TSK_EVTX_RECOVERED_RECORD", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Recovered Record") except: self.log(Level.INFO, "Attributes Creation Error, Recovered Record. 
==> ") try: attID_ev_cn = skCase.addArtifactAttributeType("TSK_EVTX_COMPUTER_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Computer Name") except: self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ") try: attID_ev_ei = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Identiifier") except: self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ") try: attID_ev_eiq = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Identifier Qualifiers") except: self.log(Level.INFO, "Attributes Creation Error, Event Identifier Qualifiers. ==> ") try: attID_ev_el = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_LEVEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Level") except: self.log(Level.INFO, "Attributes Creation Error, Event Level. ==> ") try: attID_ev_oif = skCase.addArtifactAttributeType("TSK_EVTX_OFFSET_IN_FILE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Offset In File") except: self.log(Level.INFO, "Attributes Creation Error, Event Offset In File. ==> ") try: attID_ev_id = skCase.addArtifactAttributeType("TSK_EVTX_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Identifier") except: self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ") try: attID_ev_sn = skCase.addArtifactAttributeType("TSK_EVTX_SOURCE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source Name") except: self.log(Level.INFO, "Attributes Creation Error, Source Name. ==> ") try: attID_ev_usi = skCase.addArtifactAttributeType("TSK_EVTX_USER_SECURITY_ID", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User Security ID") except: self.log(Level.INFO, "Attributes Creation Error, User Security ID. 
==> ") try: attID_ev_et = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time") except: self.log(Level.INFO, "Attributes Creation Error, Event Time. ==> ") try: attID_ev_ete = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME_EPOCH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time Epoch") except: self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ") try: attID_ev_dt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Detail") except: self.log(Level.INFO, "Attributes Creation Error, Event Detail. ==> ") try: attID_ev_cnt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_ID_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Id Count") except: self.log(Level.INFO, "Attributes Creation Error, Event ID Count. ==> ") #self.log(Level.INFO, "Get Artifacts after they were created.") # Get the new artifacts and attributes that were just created artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS") artID_evtx_evt = skCase.getArtifactType("TSK_EVTX_LOGS") artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID") artID_evtx_Long_evt = skCase.getArtifactType("TSK_EVTX_LOGS_BY_ID") attID_ev_fn = skCase.getAttributeType("TSK_EVTX_FILE_NAME") attID_ev_rc = skCase.getAttributeType("TSK_EVTX_RECOVERED_RECORD") attID_ev_cn = skCase.getAttributeType("TSK_EVTX_COMPUTER_NAME") attID_ev_ei = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER") attID_ev_eiq = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS") attID_ev_el = skCase.getAttributeType("TSK_EVTX_EVENT_LEVEL") attID_ev_oif = skCase.getAttributeType("TSK_EVTX_OFFSET_IN_FILE") attID_ev_id = skCase.getAttributeType("TSK_EVTX_IDENTIFIER") attID_ev_sn = skCase.getAttributeType("TSK_EVTX_SOURCE_NAME") attID_ev_usi = skCase.getAttributeType("TSK_EVTX_USER_SECURITY_ID") attID_ev_et = 
skCase.getAttributeType("TSK_EVTX_EVENT_TIME") attID_ev_ete = skCase.getAttributeType("TSK_EVTX_EVENT_TIME_EPOCH") attID_ev_dt = skCase.getAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT") attID_ev_cnt = skCase.getAttributeType("TSK_EVTX_EVENT_ID_COUNT") # we don't know how much work there is yet progressBar.switchToIndeterminate() # Find the Windows Event Log Files files = [] fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%.evtx") # if self.List_Of_Events[0] == 'ALL': # files = fileManager.findFiles(dataSource, "%.evtx") # else: # for eventlog in self.List_Of_Events: # file_name = fileManager.findFiles(dataSource, eventlog) # files.extend(file_name) #self.log(Level.INFO, "found " + str(file_name) + " files") #self.log(Level.INFO, "found " + str(files) + " files") numFiles = len(files) self.log(Level.INFO, "found " + str(numFiles) + " files") progressBar.switchToDeterminate(numFiles) fileCount = 0; # Create Event Log directory in temp directory, if it exists then continue on processing Temp_Dir = Case.getCurrentCase().getTempDirectory() self.log(Level.INFO, "create Directory " + Temp_Dir) temp_dir = os.path.join(Temp_Dir, "EventLogs") try: os.mkdir(temp_dir) except: self.log(Level.INFO, "Event Log Directory already exists " + temp_dir) # Write out each Event Log file to the temp directory for file in files: # Check if the user pressed cancel while we were busy if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK #self.log(Level.INFO, "Processing file: " + file.getName()) fileCount += 1 # Save the DB locally in the temp folder. 
use file id as name to reduce collisions lclDbPath = os.path.join(temp_dir, file.getName()) ContentUtils.writeToFile(file, File(lclDbPath)) # Run the EXE, saving output to a sqlite database self.log(Level.INFO, "Running program on data source " + self.path_to_exe + " parm 1 ==> " + temp_dir + " Parm 2 ==> " + os.path.join(Temp_Dir,"\EventLogs.db3")) subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "EventLogs.db3")]).communicate()[0] # Set the database to be read to the one created by the Event_EVTX program lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "EventLogs.db3") self.log(Level.INFO, "Path to the Eventlogs database file created ==> " + lclDbPath) # Open the DB using JDBC try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # files = [] # fileManager = Case.getCurrentCase().getServices().getFileManager() # if self.List_Of_Events[0] == 'ALL': # files = fileManager.findFiles(dataSource, "%.evtx") # else: # for eventlog in self.List_Of_Events: # file_name = fileManager.findFiles(dataSource, eventlog) # files.extend(file_name) for file in files: file_name = file.getName() self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====") # Query the contacts table in the database and get all columns. 
if self.List_Of_Events[0] != 'ALL': try: stmt = dbConn.createStatement() SQL_Statement = "SELECT File_Name, Recovered_Record, Computer_name, Event_Identifier, " + \ " Event_Identifier_Qualifiers, Event_Level, Event_offset, Identifier, " + \ " Event_source_Name, Event_User_Security_Identifier, Event_Time, " + \ " Event_Time_Epoch, Event_Detail_Text FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \ " and Event_Identifier in ('" + self.Event_Id_List + "');" self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====") resultSet = stmt.executeQuery(SQL_Statement) except SQLException as e: self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Cycle through each row and create artifacts while resultSet.next(): try: #File_Name = resultSet.getString("File_Name") #Recovered_Record = resultSet.getString("Recovered_Record") Computer_Name = resultSet.getString("Computer_Name") Event_Identifier = resultSet.getInt("Event_Identifier") #Event_Identifier_Qualifiers = resultSet.getString("Event_Identifier_Qualifiers") Event_Level = resultSet.getString("Event_Level") #Event_Offset = resultSet.getString("Event_Offset") #Identifier = resultSet.getString("Identifier") Event_Source_Name = resultSet.getString("Event_Source_Name") Event_User_Security_Identifier = resultSet.getString("Event_User_Security_Identifier") Event_Time = resultSet.getString("Event_Time") #Event_Time_Epoch = resultSet.getString("Event_Time_Epoch") Event_Detail_Text = resultSet.getString("Event_Detail_Text") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields # Make artifact for TSK_EVTX_LOGS art = file.newArtifact(artID_evtx) art.addAttributes(((BlackboardAttribute(attID_ev_cn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Computer_Name)), \ 
(BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \ (BlackboardAttribute(attID_ev_el, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Level)), \ (BlackboardAttribute(attID_ev_sn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Source_Name)), \ (BlackboardAttribute(attID_ev_usi, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_User_Security_Identifier)), \ (BlackboardAttribute(attID_ev_et, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time)), \ (BlackboardAttribute(attID_ev_dt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Detail_Text)))) # These attributes may also be added in the future #art.addAttribute(BlackboardAttribute(attID_ev_fn, ParseEvtxByEventIDIngestModuleFactory.moduleName, File_Name)) #art.addAttribute(BlackboardAttribute(attID_ev_rc, ParseEvtxByEventIDIngestModuleFactory.moduleName, Recovered_Record)) #art.addAttribute(BlackboardAttribute(attID_ev_eiq, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier_Qualifiers)) #art.addAttribute(BlackboardAttribute(attID_ev_oif, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Offset)) #art.addAttribute(BlackboardAttribute(attID_ev_id, ParseEvtxByEventIDIngestModuleFactory.moduleName, Identifier)) #art.addAttribute(BlackboardAttribute(attID_ev_ete, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time_Epoch)) else: try: stmt_1 = dbConn.createStatement() SQL_Statement_1 = "select event_identifier, file_name, count(*) 'Number_Of_Events' " + \ " FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \ " group by event_identifier, file_name order by 3;" self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====") resultSet_1 = stmt_1.executeQuery(SQL_Statement_1) except SQLException as e: self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK self.log(Level.INFO, "This is the to see what the FU 
is") # Cycle through each row and create artifacts while resultSet_1.next(): try: self.log(Level.INFO, "This is the to see what the FU is 2") #File_Name = resultSet.getString("File_Name") #Recovered_Record = resultSet.getString("Recovered_Record") Event_Identifier = resultSet_1.getInt("Event_Identifier") Event_ID_Count = resultSet_1.getInt("Number_Of_Events") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") self.log(Level.INFO, "This is the to see what the FU is 3") # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields # Make artifact for TSK_EVTX_LOGS art_1 = file.newArtifact(artID_evtx_Long) self.log(Level.INFO, "Type of Object is ==> " + str(type(Event_ID_Count))) art_1.addAttributes(((BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \ (BlackboardAttribute(attID_ev_cnt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_ID_Count)))) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None)) IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_Long_evt, None)) # Clean up try: if self.List_Of_Events[0] != 'ALL': stmt.close() else: stmt_1.close() dbConn.close() os.remove(lclDbPath) except: self.log(Level.INFO, "Error closing the statment, closing the database or removing the file") #Clean up EventLog directory and files for file in files: try: os.remove(os.path.join(temp_dir,file.getName())) except: self.log(Level.INFO, "removal of Event Log file failed " + Temp_Dir + "\\" + file.getName()) try: os.rmdir(temp_dir) except: self.log(Level.INFO, "removal of Event Logs directory failed " + Temp_Dir) # Fire an event to notify the UI and others that there are new artifacts 
IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None)) # After all databases, post a message to the ingest messages in box. message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " Event Logs have been parsed " ) IngestServices.getInstance().postMessage(message) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None)) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: extract .fseventsd files, parse them with the
    external MacFSEvents executable into a SQLite DB, and post one blackboard
    artifact per parsed row.

    NOTE(review): indentation below is reconstructed from a collapsed source
    dump — verify loop/branch extents against the original module.

    :param dataSource: data source being ingested (searched for .fseventsd files)
    :param progressBar: ingest progress bar (indeterminate; file count logged only)
    :return: IngestModule.ProcessResult.OK (also returned early on cancel/DB errors)
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the temp directory and create the sub directory
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "MacFSEvents")
    try:
        os.mkdir(temp_dir)
    except:
        # NOTE(review): bare except — also hides non-EEXIST failures (e.g. permissions)
        self.log(Level.INFO, "FSEvents Directory already exists " + temp_dir)

    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%", ".fseventsd")
    numFiles = len(files)
    for file in files:
        #self.log(Level.INFO, "Files ==> " + file.getName())
        # Skip directory pseudo-entries and the uuid marker file
        if (file.getName() == "..") or (file.getName() == '.') or (file.getName() == 'fseventsd-uuid'):
            pass
            #self.log(Level.INFO, "Files ==> " + str(file))
        else:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            # NOTE(review): despite the comment, the plain file name is used here,
            # so same-named files from different folders can collide — confirm intent.
            filePath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(filePath))

    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))
    # NOTE(review): "\M" in the log string is a literal backslash-M (not an escape);
    # the log also shows "-s " + Temp_Dir while the actual call passes temp_dir.
    self.log(Level.INFO, "Running program ==> " + self.MacFSEvents_Executable + " -c Autopsy " + "-o " + temp_dir + \
             " -s " + Temp_Dir + "\MacFSEvents")
    # Run the external FSEvents parser; it writes Autopsy_FSEvents-Parsed_Records_DB.sqlite into temp_dir
    pipe = Popen([self.MacFSEvents_Executable, "-c", "Autopsy", "-o", temp_dir, "-s", temp_dir], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    database_file = os.path.join(temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite")

    # open the database to get the SQL and artifact info out of
    # (fsevents_sql.db3 ships next to this script and drives which artifacts to create)
    try:
        head, tail = os.path.split(os.path.abspath(__file__))
        settings_db = os.path.join(head, "fsevents_sql.db3")
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn1 = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Create each artifact type listed in the settings DB (errors mean it already exists)
    try:
        stmt1 = dbConn1.createStatement()
        sql_statement1 = "select distinct artifact_name, artifact_title from extracted_content_sql;"
        #self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
        resultSet1 = stmt1.executeQuery(sql_statement1)
        while resultSet1.next():
            try:
                self.log(Level.INFO, "Begin Create New Artifacts")
                artID_fse = skCase.addArtifactType(resultSet1.getString("artifact_name"), resultSet1.getString("artifact_title"))
            except:
                self.log(Level.INFO, "Artifacts Creation Error, " + resultSet1.getString("artifact_name") + " some artifacts may not exist now. ==> ")
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        #return IngestModule.ProcessResult.OK

    # Create the attribute type, if it exists then catch the error
    try:
        attID_fse_fn = skCase.addArtifactAttributeType("TSK_FSEVENTS_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fse_msk = skCase.addArtifactAttributeType("TSK_FSEVENTS_FILE_MASK", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Mask")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_src = skCase.addArtifactAttributeType("TSK_FSEVENTS_SOURCE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source File")
    except:
        # NOTE(review): copy-pasted message says "Mask" but this is Source File
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_dte = skCase.addArtifactAttributeType("TSK_FSEVENTS_DATES", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Date(s)")
    except:
        # NOTE(review): copy-pasted message says "Mask" but this is Date(s)
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")

    # Open the parser's output database (the one the external program just wrote)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % os.path.join(temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite"))
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    #artID_fse = skCase.getArtifactTypeID("TSK_MACOS_FSEVENTS")
    #artID_fse_evt = skCase.getArtifactType("TSK_MACOS_FSEVENTS")
    # Look up the (now-existing) artifact and attribute type handles
    artID_fse = skCase.getArtifactTypeID("TSK_MACOS_ALL_FSEVENTS")
    artID_fse_evt = skCase.getArtifactType("TSK_MACOS_ALL_FSEVENTS")
    attID_fse_fn = skCase.getAttributeType("TSK_FSEVENTS_FILE_NAME")
    attID_fse_msk = skCase.getAttributeType("TSK_FSEVENTS_FILE_MASK")
    attID_fse_src = skCase.getAttributeType("TSK_FSEVENTS_SOURCE")
    attID_fse_dte = skCase.getAttributeType("TSK_FSEVENTS_DATES")

    # Query the database: for each extracted file, run every configured SQL
    # statement filtered to rows whose source matches that file, and turn each
    # result row into a blackboard artifact.
    for file in files:
        if ('slack' in file.getName()):
            pass
        elif (file.getName() == '..') or (file.getName() == '.'):
            pass
        else:
            stmt1 = dbConn1.createStatement()
            sql_statement1 = "select sql_statement, artifact_name, artifact_title from extracted_content_sql;"
            #self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
            resultSet1 = stmt1.executeQuery(sql_statement1)
            while resultSet1.next():
                try:
                    artID_fse = skCase.getArtifactTypeID(resultSet1.getString("artifact_name"))
                    artID_fse_evt = skCase.getArtifactType(resultSet1.getString("artifact_name"))
                    try:
                        stmt = dbConn.createStatement()
                        # NOTE(review): SQL built by string concatenation with the
                        # file name — breaks on names containing a single quote.
                        sql_statement = resultSet1.getString("sql_statement") + " and source like '%" + file.getName() + "';"
                        #self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
                        resultSet = stmt.executeQuery(sql_statement)
                        #self.log(Level.INFO, "query SQLite Master table ==> " )
                        #self.log(Level.INFO, "query " + str(resultSet))
                        # Cycle through each row and create artifact
                        while resultSet.next():
                            # Add the attributes to the artifact.
                            art = file.newArtifact(artID_fse)
                            #self.log(Level.INFO, "Result ==> " + resultSet.getString("mask") + ' <==> ' + resultSet.getString("source"))
                            art.addAttributes(((BlackboardAttribute(attID_fse_fn, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("filename"))), \
                                               (BlackboardAttribute(attID_fse_msk, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("mask"))), \
                                               (BlackboardAttribute(attID_fse_src, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("source"))), \
                                               (BlackboardAttribute(attID_fse_dte, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("OTHER_DATES")))))
                            #try: # index the artifact for keyword search
                                #blackboard.indexArtifact(art)
                            #except:
                                #self.log(Level.INFO, "Error indexing artifact " + art.getDisplayName())
                    except SQLException as e:
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
            try:
                stmt.close()
            except:
                self.log(Level.INFO, "Error closing statement for " + file.getName())

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(MacFSEventsIngestModuleFactory.moduleName, artID_fse_evt, None))

    # Clean up JDBC resources and the temp extraction directory
    # NOTE(review): cleanup is not in a finally block, so early returns above leak
    # the open connections; first failure in this try skips the remaining closes.
    try:
        stmt.close()
        dbConn.close()
        stmt1.close()
        dbConn1.close()
        #os.remove(Temp_Dir + "Autopsy_FSEvents-EXCEPTIONS_LOG.txt")
        #os.remove(Temp_Dir + "Autopsy_FSEvents-Parsed_Records.tsv")
        #os.remove(Temp_Dir + "Autopsy_FSEvents-Parsed_Records_DB.sqlite")
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of MacFSEvents imageinfo database failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "MacFSEventsSettings", " MacFSEventsSettings Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point for Windows Mail: extract each user's store.vol
    (UnistoreDB) ESE database, convert it to SQLite via processEsedbFile, then
    mine emails from each converted DB via processEmails.

    NOTE(review): indentation below is reconstructed from a collapsed source
    dump — verify loop/branch extents against the original module.

    :param dataSource: data source searched for "store.vol" under "UnistoreDB"
    :param progressBar: ingest progress bar, switched to determinate per file found
    :return: IngestModule.ProcessResult.OK (early return on cancel or DB-open failure)
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the store.vol abstract file information
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "store.vol", "UnistoreDB")
    numFiles = len(files)
    #self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create Event Log directory in temp directory, if it exists then continue on processing
    # moduleDirectory holds the converted .db3 outputs (persistent);
    # temporaryDirectory holds the raw extracted store.vol copies.
    moduleDirectory = os.path.join(Case.getCurrentCase().getModuleDirectory(), "Email")
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Email")
    #self.log(Level.INFO, "create Directory " + moduleDirectory)
    try:
        os.mkdir(moduleDirectory)
    except:
        pass  # NOTE(review): bare except — assumed directory-already-exists, but hides other OS errors
        #self.log(Level.INFO, "Module directory already exists " + moduleDirectory)
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass
        #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

    # Write out each users store.vol file and process it.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1
        # Save the file locally. Use file id as name to reduce collisions
        extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        # Convert the extracted ESE database into a SQLite .db3 in the module directory
        self.processEsedbFile(extractedFile, os.path.join(moduleDirectory, str(file.getId()) + "-" + file.getName() + ".db3"))

    # Second pass: open each converted SQLite DB and extract email artifacts
    for file in files:
        # Open the DB using JDBC
        lclDbPath = os.path.join(moduleDirectory, str(file.getId()) + "-" + file.getName() + ".db3")
        #self.log(Level.INFO, "Path to the mail database is ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            # NOTE(review): aborts the whole module silently (log line commented
            # out) if any single DB fails to open — consider continue + logging.
            #self.log(Level.INFO, "Could not open database file (not SQLite) " + lclDbPath + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK
        #self.processRecipients(dbConn, skCase, file)
        self.processEmails(dbConn, skCase, file)
        # Clean up
        # NOTE(review): not in a finally — an exception in processEmails leaks the connection
        dbConn.close()
        #os.remove(lclDbPath)

    #Clean up EventLog directory and files
    ### To Do

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "WinMail Processor", " Windows Mail Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest entry point: convert hiberfil.sys to a raw memory image with
    Volatility's imagecopy plugin and attach the result to the case as a new
    local-files data source.

    NOTE(review): indentation below is reconstructed from a collapsed source
    dump — in particular, verify whether the data-source-add section belongs
    inside the per-file loop, as rendered here, or after it.

    :param dataSource: data source searched for "hiberfil.sys" at "/"
    :param progressBar: ingest progress bar (left indeterminate)
    :return: IngestModule.ProcessResult.OK
    """
    self.log(Level.INFO, "Starting to process Hiberfil.sys and Crash Dumps")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the temp directory and create the sub directory
    # self.hiber_flag: user option controlling whether hiberfil.sys is processed
    if self.hiber_flag:
        Mod_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        try:
            ModOut_Dir = os.path.join(Mod_Dir, "Volatility", "Memory-Image-hiberfil")
            self.log(Level.INFO, "Module Output Directory ===> " + ModOut_Dir)
            #dir_util.mkpath(ModOut_Dir)
            os.mkdir(os.path.join(Mod_Dir, "Volatility"))
            os.mkdir(ModOut_Dir)
        except:
            # NOTE(review): if os.path.join itself raised, ModOut_Dir would be
            # unbound here and this log line would raise NameError.
            self.log(Level.INFO, "***** Error Module Output Directory already exists " + ModOut_Dir)

        # Set the database to be read to the once created by the prefetch parser program
        skCase = Case.getCurrentCase().getSleuthkitCase();
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "hiberfil.sys", "/")
        numFiles = len(files)
        self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))
        for file in files:
            self.log(Level.INFO, "File to process is ==> " + str(file))
            self.log(Level.INFO, "File name to process is ==> " + file.getName())
            tmp_Dir = Case.getCurrentCase().getTempDirectory()
            # Copy hiberfil.sys out of the image so the external tool can read it
            Hiber_File = os.path.join(tmp_Dir, file.getName())
            ContentUtils.writeToFile(file, File(Hiber_File))
            self.log(Level.INFO, "File name to process is ==> " + Hiber_File)

            # Create the directory to dump the hiberfil
            dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
            # self.Python_Program: True when Volatility is a .py script needing a
            # python interpreter; False when it is a standalone executable.
            if self.Python_Program:
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " + Hiber_File + " " + \
                         " -O " + dump_file)
                if PlatformUtil.isWindowsOS():
                    pipe = Popen(["Python.exe", self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
                else:
                    pipe = Popen(["python", self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            else:
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " + Hiber_File + " " + \
                         " -O " + dump_file)
                pipe = Popen([self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            # Add hiberfil memory image to a new local data source
            services = IngestServices.getInstance()
            progress_updater = ProgressUpdater()
            newDataSources = []
            dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
            dir_list = []
            dir_list.append(dump_file)

            # skCase = Case.getCurrentCase().getSleuthkitCase();
            fileManager_2 = Case.getCurrentCase().getServices().getFileManager()
            skcase_data = Case.getCurrentCase()

            # Get a Unique device id using uuid
            device_id = UUID.randomUUID()
            self.log(Level.INFO, "device id: ==> " + str(device_id))
            skcase_data.notifyAddingDataSource(device_id)

            # Add data source with files
            newDataSource = fileManager_2.addLocalFilesDataSource(str(device_id), "Hiberfile Memory Image", "", dir_list, progress_updater)
            newDataSources.append(newDataSource.getRootDirectory())

            # Get the files that were added
            files_added = progress_updater.getFiles()
            #self.log(Level.INFO, "Fire Module1: ==> " + str(files_added))
            for file_added in files_added:
                skcase_data.notifyDataSourceAdded(file_added, device_id)
                self.log(Level.INFO, "Fire Module1: ==> " + str(file_added))

    # After all databases, post a message to the ingest messages in box.
    # NOTE(review): "fro" is a typo for "from" in this user-visible message
    # (left unchanged here; string text is runtime behavior).
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "HiberFil_Crash", " Hiberfil/Crash Dumps have been extracted fro Image. " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK