def process(self, dataSource, progressBar):
    """Run the bundled img_stat EXE against the data source and save its
    output as a case report.

    Returns IngestModule.ProcessResult.OK in all cases (skips are not errors).
    """
    # We don't know how much work there will be.
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files.
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    # Get disk image paths.
    imagePaths = dataSource.getPaths()

    # We'll save our output to a file in the reports folder,
    # named based on EXE and data source ID.
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports",
                              "img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE, saving output to the report.
    # NOTE: we should really be checking whether the module has been
    # cancelled and then killing the process.
    self.log(Level.INFO, "Running program on data source")
    # FIX: 'with' guarantees the report handle is closed even if
    # Popen/communicate raises (the original leaked it on error).
    with open(reportPath, 'w') as reportHandle:
        subprocess.Popen([self.path_to_exe, imagePaths[0]],
                         stdout=reportHandle).communicate()

    # Add the report to the case, so it shows up in the tree.
    Case.getCurrentCase().addReport(reportPath, "Run EXE", "img_stat output")

    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Write a report with the number of files created in the last two
    weeks and register it with the current case.

    baseReportDir -- directory the report file is written into.
    progressBar   -- ReportProgressPanel; configured for 2 tasks.
    """
    # Configure progress bar for 2 tasks.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(2)

    # Count files created in the last two weeks (crtime is epoch seconds).
    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    currentTime = System.currentTimeMillis() / 1000
    minTime = currentTime - (14 * 24 * 60 * 60)
    otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
    # FIX: len() replaces the manual counting loop.
    fileCount = len(otherFiles)
    progressBar.increment()

    # FIX: os.path.join instead of a hard-coded backslash separator, and
    # 'with' so the handle is closed even if the write fails.
    reportPath = os.path.join(baseReportDir, self.getRelativeFilePath())
    with open(reportPath, 'w') as report:
        report.write("file count = %d" % fileCount)

    # Register the report so it shows up in the case tree.
    Case.getCurrentCase().addReport(reportPath, "SampleGeneralReportModule",
                                    "Sample Python Report")
    progressBar.increment()
    progressBar.complete()
def generateReport(self, baseReportDir, progressBar):
    """Write a report with the number of files created in the last two
    weeks and register it with the current case.

    baseReportDir -- directory the report file is written into.
    progressBar   -- ReportProgressPanel; configured for 2 tasks.
    """
    # Configure progress bar for 2 tasks.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(2)

    # Count files created in the last two weeks (crtime is epoch seconds).
    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    currentTime = System.currentTimeMillis() / 1000
    minTime = currentTime - (14 * 24 * 60 * 60)
    otherFiles = sleuthkitCase.findFilesWhere("crtime > %d" % minTime)
    # FIX: len() replaces the manual counting loop.
    fileCount = len(otherFiles)
    progressBar.increment()

    # FIX: os.path.join instead of a hard-coded backslash separator, and
    # 'with' so the handle is closed even if the write fails.
    reportPath = os.path.join(baseReportDir, self.getRelativeFilePath())
    with open(reportPath, 'w') as report:
        report.write("file count = %d" % fileCount)

    # Register the report so it shows up in the case tree.
    Case.getCurrentCase().addReport(reportPath, "SampleGeneralReportModule",
                                    "Sample Python Report")
    progressBar.increment()
    progressBar.complete()
def startUp(self, context):
    """Read module settings and create every custom blackboard attribute
    type this module posts (notification handlers, payloads, DB version).

    context -- IngestJobContext supplied by Autopsy.
    """
    self.context = context
    self.temp_dir = Case.getCurrentCase().getTempDirectory()
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # User-configurable tool toggles ("true"/"false" strings in settings).
    self.use_undark = self.local_settings.getSetting("undark") == "true"
    self.use_mdg = self.local_settings.getSetting("mdg") == "true"
    self.use_crawler = self.local_settings.getSetting("crawler") == "true"
    self.use_b2l = self.local_settings.getSetting("b2l") == "true"
    self.python_path = self.local_settings.getSetting("python_path")
    self.log(Level.INFO, "Python path: " + str(self.python_path))

    STRING = BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
    DATETIME = BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME

    # FIX: table-driven creation replaces ~15 near-identical calls.
    # Each row: (attribute stored on self, type name, value type, display).
    attribute_specs = [
        # Generic attributes
        ("att_id", "NA_ID", STRING, "ID"),
        ("att_type", "NA_TYPE", STRING, "Type"),
        ("att_created_time", "NA_CREATED_TIME", STRING, "Created time"),
        ("att_modified_time", "NA_UPDATED_TIME", STRING, "Updated time"),
        ("att_expiry_time", "NA_EXPIRY_TIME", DATETIME, "Expiry time"),
        ("att_arrival_time", "NA_ARRIVAL_TIME", DATETIME, "Arrival time"),
        # Notification handler attributes
        ("att_handler_primary_id", "NA_HANDLER_PRIMARY_ID", STRING, "Primary ID"),
        ("att_parent_id", "NA_PARENT_ID", STRING, "Parent ID"),
        ("att_wns_id", "NA_WNS_ID", STRING, "WNS ID"),
        ("att_wnf_event_name", "NA_WNF_EVENT_NAME", STRING, "WNF Event Name"),
        ("att_system_data_property_set", "NA_SYSTEM_DATA_PROPERTY_SET", STRING,
         "System data property set"),
        ("att_app_name", "NA_APP_NAME", STRING, "App name (Your Phone)"),
        # Notification attributes
        ("att_payload", "NA_PAYLOAD", STRING, "Payload"),
        ("att_payload_type", "NA_PAYLOAD_TYPE", STRING, "Content format"),
        # DB user version
        ("att_db_uv", "NA_DB_UV", STRING, "SQLite User Version"),
    ]
    for attr_name, type_name, value_type, display in attribute_specs:
        setattr(self, attr_name,
                self.create_attribute_type(type_name, value_type, display,
                                           blackboard))
def generateReport(self, baseReportDir, progressBar):
    """Copy every file listed in filesToReport.txt into the report
    directory and register the non-CSS ones with the case.

    baseReportDir -- destination directory for the copied report files.
    """
    # Setup the progress bar.
    progressBar.setIndeterminate(False)
    progressBar.start()

    # filesToReport.txt holds one absolute source path per line.
    listPath = os.path.join(projectEIAppDataPath, "filesToReport.txt")
    # FIX: 'with' closes every handle even on error; the original leaked
    # both the list file and each source file.
    with open(listPath, "r") as listFile:
        for line in listFile:
            # FIX: rstrip("\n") instead of line[:-1], which chopped a real
            # character when the last line had no trailing newline.
            line = line.rstrip("\n")
            # FIX: os.path.basename instead of a manual split("\\").
            reportName = os.path.basename(line)
            fileName = os.path.join(baseReportDir, reportName)
            with open(line, "r") as sourceFile:
                with open(fileName, "w") as report:
                    for contentline in sourceFile:
                        report.write(contentline)
            # Add the report to the Case, so it is shown in the tree
            # (CSS files are support assets, not reports).
            if ".css" not in line:
                Case.getCurrentCase().addReport(fileName, self.moduleName,
                                                reportName)

    progressBar.complete(ReportStatus.COMPLETE)
def process(self, dataSource, progressBar):
    """Export (md5, name) pairs for every hashed file in the case database
    into <image basename>_hashset.txt in the case Export directory."""
    # Amount of work is unknown up front.
    progressBar.switchToIndeterminate()

    sourcePaths = dataSource.getPaths()
    baseName = os.path.basename(sourcePaths[0])
    exportFile = os.path.join(Case.getCurrentCase().getExportDirectory(),
                              str(baseName) + "_hashset.txt")

    # Only rows that actually carry an MD5 value.
    sql_statement = 'select name, md5 from tsk_files where md5 <> "";'
    caseDb = Case.getCurrentCase().getSleuthkitCase()
    dbquery = caseDb.executeQuery(sql_statement)
    rows = dbquery.getResultSet()
    with open(exportFile, 'w') as out:
        while rows.next():
            out.write(rows.getString("md5") + "\t" +
                      rows.getString("name") + "\n")
    dbquery.close()

    # Let the user know via the ingest inbox.
    notice = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Create_DS_Hashset",
        " Hashset Create For Datasource " + baseName)
    IngestServices.getInstance().postMessage(notice)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Find files whose name contains "test", tag each with a
    TSK_INTERESTING_FILE_HIT artifact, read its content, and post a
    summary message to the ingest inbox.

    Returns IngestModule.ProcessResult.OK, including on cancellation.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For our example, we will use FileManager to get all
    # files with the word "test" in the name and then count and read them.
    # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/4.6.0/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%test%")

    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:

        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is
        # a generic type of artifact. Refer to the developer docs for
        # other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonDataSourceIngestModuleFactory.moduleName,
                                  "Test file")
        art.addAttribute(att)

        try:
            # Index the artifact for keyword search.
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            # Indexing failure is non-fatal; the artifact still exists.
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # To further the example, this code will read the contents of the
        # file and count the number of bytes.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        readLen = inputStream.read(buffer)
        # read() returns -1 at end of stream.
        while (readLen != -1):
            totLen = totLen + readLen
            readLen = inputStream.read(buffer)

        # Update the progress bar.
        progressBar.progress(fileCount)

    # Post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run the bundled img_stat EXE against the data source and save its
    output as a case report.

    Returns IngestModule.ProcessResult.OK in all cases (skips are not errors).
    """
    # We don't know how much work there will be.
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files.
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    # Get disk image paths.
    imagePaths = dataSource.getPaths()

    # We'll save our output to a file in the reports folder,
    # named based on EXE and data source ID.
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports",
                              "img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE, saving output to the report.
    # NOTE: we should really be checking whether the module has been
    # cancelled and then killing the process.
    self.log(Level.INFO, "Running program on data source")
    # FIX: 'with' guarantees the report handle is closed even if
    # Popen/communicate raises (the original leaked it on error).
    with open(reportPath, 'w') as reportHandle:
        subprocess.Popen([self.path_to_exe, imagePaths[0]],
                         stdout=reportHandle).communicate()

    # Add the report to the case, so it shows up in the tree.
    Case.getCurrentCase().addReport(reportPath, "Run EXE", "img_stat output")

    return IngestModule.ProcessResult.OK
def analyze(self, dataSource, fileManager, context):
    """Extract every "WordsFramework" database from the data source to a
    temp file and parse Words With Friends messages out of it.

    Registers the module-level "WWF" account type on the communications
    manager as a side effect (stored in the global wwfAccountType).
    """
    try:
        global wwfAccountType
        # Register (or fetch) the custom account type for WWF messages.
        wwfAccountType = Case.getCurrentCase().getSleuthkitCase(
        ).getCommunicationsManager().addAccountType(
            "WWF", "Words with Friends")

        absFiles = fileManager.findFiles(dataSource, "WordsFramework")
        for abstractFile in absFiles:
            try:
                # Copy the DB out of the image into the case temp dir;
                # the file id prefix avoids name collisions.
                jFile = File(
                    Case.getCurrentCase().getTempDirectory(),
                    str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(
                    abstractFile, jFile,
                    context.dataSourceIngestIsCancelled)
                self.__findWWFMessagesInDB(jFile.toString(), abstractFile,
                                           dataSource)
            except Exception as ex:
                # Per-file failures are logged and the loop continues.
                self._logger.log(Level.SEVERE,
                                 "Error parsing WWF messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding WWF messages; nothing to analyze.
        pass
def shutDown(self):
    """On module shutdown, write out the MD5 hashes collected during
    ingest.  With a configured hash DB, only hashes NOT already in it go
    to NewLowHangingFruit.txt; on any failure, every unique hash goes to
    AllLowHangingFruit.txt as a best-effort fallback.

    NOTE(review): md5 and filename are module-level state populated
    during ingest — confirm against the rest of the file.
    """
    noDupes = list(set(md5))
    try:
        if filename:
            uniquePath = os.path.join(
                Case.getCurrentCase().getCaseDirectory(),
                "NewLowHangingFruit.txt")
            uniqueFile = open(uniquePath, 'w')
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % filename)
            # FIX: parameterized query — hash values come from evidence
            # content, so never splice them into the SQL string.
            stmt = dbConn.prepareStatement("SELECT * FROM MD5 where md5 == ?")
            try:
                for line in noDupes:
                    stmt.setString(1, line)
                    resultSet = stmt.executeQuery()
                    if not resultSet.next():
                        # Hash is new (not in the database) -> report it.
                        uniqueFile.write(line + '\n')
            finally:
                # FIX: close resources even if a query fails mid-loop.
                stmt.close()
                dbConn.close()
                uniqueFile.close()
    except Exception:
        # Best-effort fallback: no usable hash DB, dump every unique hash.
        allPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                               "AllLowHangingFruit.txt")
        allFile = open(allPath, 'w')
        for line in noDupes:
            allFile.write(line + '\n')
        allFile.close()
def addBackupFilesToDataSource(self, dataSource, modDir):
    """Register the extracted iTunes backup directory as a new
    local-files data source in the current case."""
    progressUpdater = ProgressUpdater()
    newDataSources = []

    # addLocalFilesDataSource expects a list of directories.
    sourceDirs = [modDir]

    currentCase = Case.getCurrentCase()
    fileManager = currentCase.getServices().getFileManager()

    # Announce the pending add so the UI can react, then perform it.
    deviceId = UUID.randomUUID()
    currentCase.notifyAddingDataSource(deviceId)
    newDataSource = fileManager.addLocalFilesDataSource(
        str(deviceId), "Itunes Backup", "", sourceDirs, progressUpdater)
    newDataSources.append(newDataSource.getRootDirectory())

    # Notify listeners about every file that was added.
    for fileAdded in progressUpdater.getFiles():
        currentCase.notifyDataSourceAdded(fileAdded, deviceId)
def process(self, dataSource, progressBar):
    """Dump (md5, name) for every hashed case file into
    <image basename>_hashset.txt in the Export directory."""
    # Work amount is unknown up front.
    progressBar.switchToIndeterminate()

    sourcePaths = dataSource.getPaths()
    sourceName = os.path.basename(sourcePaths[0])
    exportFile = os.path.join(Case.getCurrentCase().getExportDirectory(),
                              str(sourceName) + "_hashset.txt")

    # Only rows that actually carry an MD5 value.
    sql_statement = 'select name, md5 from tsk_files where md5 <> "";'
    skCase = Case.getCurrentCase().getSleuthkitCase()
    dbquery = skCase.executeQuery(sql_statement)
    resultSet = dbquery.getResultSet()

    # Collect all rows first, then write them in one call.
    lines = []
    while resultSet.next():
        lines.append(resultSet.getString("md5") + "\t" +
                     resultSet.getString("name") + "\n")
    dbquery.close()
    with open(exportFile, 'w') as f:
        f.writelines(lines)

    # Notify the user through the ingest inbox.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Create_DS_Hashset",
        " Hashset Create For Datasource " + sourceName)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Write a CSV of (unique path, md5) for every non-directory file in
    the case and register it as a report.

    baseReportDir -- directory the CSV is written into.
    """
    fileName = os.path.join(baseReportDir, self.getRelativeFilePath())

    # Query the database for the files (ignore the directories).
    sleuthkitCase = Case.getCurrentCase().getSleuthkitCase()
    files = sleuthkitCase.findAllFilesWhere(
        "NOT meta_type = " +
        str(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()))

    # Setup the progress bar.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(len(files))

    # FIX: 'with' guarantees the report is closed even if getUniquePath
    # throws part-way through (the original leaked the handle then).
    with open(fileName, 'w') as report:
        for file in files:
            md5 = file.getMd5Hash()
            # md5 will be None if the Hash Lookup module was not run.
            if md5 is None:
                md5 = ""
            report.write(file.getUniquePath() + "," + md5 + "\n")
            progressBar.increment()

    # Add the report to the Case, so it is shown in the tree.
    Case.getCurrentCase().addReport(fileName, self.moduleName, "Hashes CSV")
    progressBar.complete(ReportStatus.COMPLETE)
def shutDown(self):
    """On module shutdown, write out the MD5 hashes collected during
    ingest.  With a configured hash DB, only hashes NOT already in it go
    to NewLowHangingFruit.txt; on any failure, every unique hash goes to
    AllLowHangingFruit.txt as a best-effort fallback.

    NOTE(review): md5 and filename are module-level state presumably
    populated during ingest — confirm against the rest of the file.
    """
    noDupes = list(set(md5))
    try:
        if (filename):
            uniquePath = os.path.join(
                Case.getCurrentCase().getCaseDirectory(),
                "NewLowHangingFruit.txt")
            uniqueFile = open(uniquePath, 'w')
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % filename)
            stmt = dbConn.createStatement()
            for line in noDupes:
                # Look the hash up in the configured hash database.
                resultSet = stmt.executeQuery(
                    "SELECT * FROM MD5 where md5 == '%s'" % line)
                if (resultSet.next()):
                    # Hash already known; placeholder for future handling.
                    temp = "Future Improvement"
                else:
                    # Hash is new -> report it.
                    uniqueFile.write(line + '\n')
            stmt.close()
            dbConn.close()
            uniqueFile.close()
    except:
        # Best-effort fallback: no usable hash DB, dump every unique hash.
        allPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                               "AllLowHangingFruit.txt")
        allFile = open(allPath, 'w')
        for line in noDupes:
            allFile.write(line + '\n')
        allFile.close()
def createAndPostSBReport(self, name, path, SB):
    """Write a human-readable QNX6 superblock report into *path* and
    register it with the current case.

    name -- label used in the report file name and title.
    path -- destination directory (created if missing).
    SB   -- dict of superblock fields (serialNum, magic, ctime, ...).
    """
    filename = name + "SuperBlockReport.txt"
    # FIX: the original ran makedirs only when *path* was falsy
    # ("if not path"), which could never create the intended directory.
    if not os.path.exists(path):
        os.makedirs(path)
    filePath = os.path.join(path, filename)

    # NOTE(review): creation/modification/access times all format
    # SB['ctime']; presumably SB also carries mtime/atime fields —
    # confirm with the superblock parser.
    timestamp = datetime.fromtimestamp(
        int(SB['ctime'])).strftime("%m/%d/%Y, %H:%M:%S")

    # 'with' guarantees the handle is closed even if a field is missing.
    with open(filePath, 'wb+') as report:
        report.write("------" + name +
                     " QNX6FS Super Block informations------\n\n")
        report.write("Serial number : " + hex(int(SB["serialNum"])) + "\n")
        report.write("Magic number : " + hex(int(SB["magic"])) + "\n")
        report.write("File system creation time : " + timestamp + "\n")
        report.write("File system modification time : " + timestamp + "\n")
        report.write("File system access time : " + timestamp + "\n")
        report.write("Block Size : " + str(int(SB["tailleBlock"])) +
                     " bytes \n")
        report.write("Number of blocks : " + hex(int(SB["nbBlocks"])) + "\n")
        report.write("Number of free blocks : " +
                     hex(int(SB["nbBlocksLibres"])) + "\n")
        report.write("Number of inodes : " + hex(int(SB["nbInodes"])) + "\n")
        report.write("Number of free inodes : " +
                     hex(int(SB["nbInodesLibres"])) + "\n")

    # Add the report to the Case, so it is shown in the tree.
    Case.getCurrentCase().addReport(
        filePath, QNX6ReaderIngestModuleFactory.moduleName,
        name + " Super Block Report")
def startUp(self, context):
    """Reset per-job state, prepare the XML roots, and load
    dictionary.json; raises IngestModuleException when the dictionary
    file is missing or cannot be parsed."""
    # User-configured flag from the UI panel.
    self.stanpl = True if self.local_settings.getFlag() else False

    # Per-job counters and bookkeeping.
    self.jobId = context.getJobId()
    self.filesFound = 0
    self.dbFound = 0
    self.picFound = 0
    self.jsonFound = 0
    self.lastFile_rep = ''
    self.el_rep = None
    self.lastFile = ''
    self.el = None

    # XML roots for the results and the report.
    self.root = et.Element("androidgeodata")
    self.root_report = et.Element("report")

    # Path (without extension) where the XML output is stored.
    currentCase = Case.getCurrentCase()
    self.xmlname = os.path.join(
        currentCase.getReportDirectory(),
        currentCase.getName() + "_" + str(self.jobId))

    # dictionary.json drives the module; abort startup if unavailable.
    path_to_dict = os.path.dirname(os.path.abspath(__file__)) + '/dictionary.json'
    if not os.path.exists(path_to_dict):
        raise IngestModuleException(
            "The dictionary file was not found in module folder")
    try:
        self.dict = json.load(open(path_to_dict))
    except:
        raise IngestModuleException("The dictionary file was not loaded")
def index_artifact(artifact, artifact_type):
    """Index *artifact* for keyword search and fire a module-data event
    so the UI refreshes the tree for *artifact_type*.

    Indexing failures are logged and ignored; the event fires regardless
    because the artifact exists even when unindexed.
    """
    try:
        Case.getCurrentCase().getServices().getBlackboard().indexArtifact(
            artifact)
    except Exception:
        # FIX: narrowed from a bare except so SystemExit /
        # KeyboardInterrupt are not swallowed.
        logging.warning("Error indexing artifact type: " + artifact_type)
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent("Forensics Analyzer", artifact_type, None))
def create_attribute_type(att_name, type, att_desc):
    """Create the custom blackboard attribute type *att_name* (value type
    *type*, display name *att_desc*) if needed, then return it.

    Creation throws when the type already exists; that is logged and
    treated as success.  (Parameter name 'type' shadows the builtin but
    is kept for caller compatibility.)
    """
    try:
        Case.getCurrentCase().getSleuthkitCase().addArtifactAttributeType(
            att_name, type, att_desc)
    except Exception:
        # FIX: narrowed from a bare except so SystemExit /
        # KeyboardInterrupt are not swallowed.
        logging.warning("Error creating attribute type: " + att_desc)
    return Case.getCurrentCase().getSleuthkitCase().getAttributeType(
        att_name)
def startUp(self, context):
    """Compute and create the export directory tree
    <case root>/Img_video_audio/<case number> used later by process()."""

    def _ensure_dir(directory):
        # mkdir that tolerates an already-existing directory
        # (Jython 2.7 has no os.makedirs(..., exist_ok=True)).
        # FIX: catches OSError instead of a bare except.
        try:
            os.mkdir(directory)
        except OSError:
            pass

    self.filesFound = 0

    # Image/video/audio MIME types whose files will be exported.
    self.listOfMimeToCopy = [
        'image/bmp', 'image/gif', 'image/heic', 'image/jpeg', 'image/png',
        'image/tiff', 'image/vnd.adobe.photoshop', 'image/x-raw-nikon',
        'image/x-ms-bmp', 'image/x-icon', 'image/webp',
        'image/vnd.microsoft.icon', 'image/x-rgb', 'image/x-ms-bmp',
        'image/x-xbitmap', 'image/x-portable-graymap',
        'image/x-portable-bitmap', 'video/webm', 'video/3gpp',
        'video/3gpp2', 'video/ogg', 'video/mpeg', 'video/mp4',
        'video/quicktime', 'video/x-msvideo', 'video/x-flv', 'video/x-m4v',
        'video/x-ms-wmv', 'audio/midi', 'audio/mpeg', 'audio/webm',
        'audio/ogg', 'audio/wav', 'audio/vnd.wave', 'audio/x-ms-wma'
    ]

    # Export directory, e.g.
    # C:\Users\user\Documents\cases\<case>\Autopsy\<number>\Export
    exportDirectory = Case.getCurrentCase().getExportDirectory()
    caseName = Case.getCurrentCase().getName()
    number = Case.getCurrentCase().getNumber()

    # Strip the Autopsy-specific suffixes to reach the case root,
    # e.g. C:\Users\user\Documents\cases\<case>
    exportDirectory = exportDirectory.replace("\\Autopsy", "")
    exportDirectory = exportDirectory.replace("\\" + str(number), "")
    exportDirectory = exportDirectory.replace("\\Export", "")
    self.log(Level.INFO,
             "==> 1) exportDirectory=" + str(exportDirectory) +
             " number=" + str(number) + " caseName=" + str(caseName))
    _ensure_dir(exportDirectory)

    # <case root>\Img_video_audio
    exportDirectory = os.path.join(exportDirectory, "Img_video_audio")
    self.log(Level.INFO,
             "==> 2) exportDirectory=" + str(exportDirectory) +
             " number=" + str(number))
    _ensure_dir(exportDirectory)

    # <case root>\Img_video_audio\<case number>
    exportDirectory = os.path.join(exportDirectory, number)
    self.log(Level.INFO,
             "==> 3) exportDirectory=" + str(exportDirectory) +
             " number=" + str(number))
    _ensure_dir(exportDirectory)

    # Remember for process().
    self.exportDirectoryGlobal = exportDirectory
def process(self, dataSource, progressBar):
    """Export every file matching the configured extensions into
    Export/Mass_Export/<ext>/ directories, then post a completion
    message to the ingest inbox."""
    self.log(Level.INFO, "Starting to process")
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Create the Mass_Export directory under Export; keep going if it
    # already exists.
    exportDirectory = Case.getCurrentCase().getExportDirectory()
    exportDir = os.path.join(exportDirectory, "Mass_Export")
    self.log(Level.INFO, "create Directory " + exportDir)
    try:
        os.mkdir(exportDir)
    except:
        self.log(Level.INFO,
                 "Mass Export directory already exists" + exportDir)

    for fileExtension in self.extensionList:
        fileExt = fileExtension.strip()
        files = fileManager.findFiles(dataSource, "%." + fileExt)
        numFiles = len(files)
        self.log(Level.INFO,
                 "found " + str(numFiles) + " files for extension ==> " +
                 str(fileExtension))
        # One sub-directory per extension.
        expDir = os.path.join(exportDir, fileExt)
        try:
            os.mkdir(expDir)
        except:
            self.log(Level.INFO,
                     "Directory already exists ==> " + str(expDir))

        for file in files:
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # File id prefix avoids name collisions.
            extractedFile = os.path.join(
                expDir, str(file.getId()) + "-" + file.getName())
            try:
                ContentUtils.writeToFile(file, File(extractedFile))
            except:
                # FIX: the original logged
                # os.path.join(temporaryDirectory, file.getName()) where
                # temporaryDirectory is undefined here, raising NameError
                # inside the handler.  Log the actual destination.
                self.log(Level.INFO,
                         "Error writing File " + extractedFile)

    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Mass Export By Extension Complete", "Complete")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def DJIPhantomReportFiles(self, moduleDirectory):
    """Register every .csv/.kml file in *moduleDirectory* as a report in
    the current case."""
    for fileName in os.listdir(moduleDirectory):
        # FIX: endswith accepts a tuple — one call instead of an or-chain.
        if fileName.endswith((".csv", ".kml")):
            fullFileName = os.path.join(moduleDirectory, fileName)
            self.log(Level.INFO, "FileName ==> " + str(fullFileName))
            # Add the report to the Case, so it is shown in the tree.
            Case.getCurrentCase().addReport(fullFileName, fileName,
                                            fullFileName)
def process(self, dataSource, progressBar):
    """Collect media files by extension and hand them to the external
    analysis executable on a worker thread, honouring cancellation by
    interrupting that thread."""
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Query all files matching the allowed extensions.
    files = []
    for extension in ALLOWED_EXTENSIONS:
        try:
            files.extend(fileManager.findFiles(dataSource, "%" + extension))
        except TskCoreException:
            self.log(Level.INFO,
                     "Error getting files from: '" + extension + "'")

    numFiles = len(files)
    # FIX: original tested len(numFiles) on an int (TypeError), used the
    # nonexistent Level.ERROR (java.util.logging uses SEVERE), and
    # returned DataSourceIngestModule.ProcessResult.
    if numFiles == 0:
        self.log(Level.SEVERE, "Didn't find any usable files! Terminating")
        return IngestModule.ProcessResult.OK
    self.log(Level.INFO, "Found " + str(numFiles) + " files")

    module_output_dir = Case.getCurrentCase().getModuleDirectory()
    # FIX: original referenced the undefined name 'output_dir'.
    module_dir = os.path.join(module_output_dir, dataSource.getName(),
                              C_FDRI_DIR)

    # Run the external tool on a worker thread; this can block for a
    # long time.  NOTE(review): configFilePath is not defined in this
    # method — presumably module-level state; confirm.
    executable_thread = Thread(
        target=lambda: self.thread_work(self.pathToExe, configFilePath))
    executable_thread.start()

    # Poll for cancellation once a second; interrupting the Java thread
    # is the most responsive way to stop the external process.
    while executable_thread.isAlive():
        if self.context.isJobCancelled():
            self.log(Level.INFO, "User cancelled job! Terminating thread")
            JThread.interrupt(executable_thread)
            self.log(Level.INFO, "Thread terminated")
            self.deleteFiles(module_dir)
            return IngestModule.ProcessResult.OK
        time.sleep(1)

    # Post a message to the ingest messages inbox.
    # FIX: original used the undefined name 'fileCount'.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module",
        "Found %d files" % numFiles)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run YARA over every non-KNOWN .doc file in the data source,
    collecting matches in Reports/YARA.txt, which is then registered
    with the case."""
    progressBar.switchToIndeterminate()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports", "YARA.txt")
    # FIX: 'with' guarantees the report handle is closed even when the
    # job is cancelled mid-loop (the original leaked it on that early
    # return).
    with open(reportPath, 'w') as reportHandle:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Skip NSRL-known files; only scan potentially interesting ones.
            if str(file.getKnown()) != "KNOWN":
                exportPath = os.path.join(
                    Case.getCurrentCase().getTempDirectory(),
                    str(file.getId()) + "." + file.getNameExtension())
                ContentUtils.writeToFile(file, File(exportPath))
                # YARA writes its matches directly into the report.
                subprocess.Popen(
                    [self.path_to_exe, self.path_to_rules, exportPath],
                    stdout=reportHandle).communicate()
                reportHandle.write(file.getParentPath() + file.getName() +
                                   '\n\n')
            self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "YARA Scan",
                                          "Scanned %d Files" % numFiles)
    IngestServices.getInstance().postMessage(message)
    Case.getCurrentCase().addReport(reportPath, "YARA Scan",
                                    "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def create_artifact_type(base_name, art_name, art_desc):
    """Create the custom artifact type *art_name* if needed (display name
    is capitalized *base_name* + *art_desc*) and return it.

    Creation throws when the type already exists; that is logged and
    treated as success.
    """
    try:
        Case.getCurrentCase().getSleuthkitCase().addBlackboardArtifactType(
            art_name, base_name.capitalize() + art_desc)
    except Exception:
        # FIX: narrowed from a bare except so SystemExit /
        # KeyboardInterrupt are not swallowed.
        logging.warning("Error creating artifact type: " + art_desc)
    art = Case.getCurrentCase().getSleuthkitCase().getArtifactType(
        art_name)
    return art
def process(self, dataSource, progressBar):
    """Extract RingCentral meeting chat logs (.txt files under
    /Documents/RingCentral/Meetings, excluding "-slack" files) to a temp
    directory, parse each via self.chatMeetingLogs, then clean up."""
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Get current case and the chat-log abstract file information.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    chatFiles = fileManager.findFiles(dataSource, "%.txt",
                                      "/Documents/RingCentral/Meetings")
    numFiles = len(chatFiles)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create RingCentral directory in temp directory; if it exists then
    # continue on processing.
    temporaryDirectory = os.path.join(
        Case.getCurrentCase().getTempDirectory(), "RingCentral")
    try:
        os.mkdir(temporaryDirectory)
    except:
        # Directory already exists -- not an error.
        pass

    # Get and write out chat meeting files.
    for file in chatFiles:
        # "-slack" exports have a different format and are skipped here.
        if "-slack" not in file.getName():
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            fileCount += 1

            # Save the file locally. Use file id as name to reduce
            # collisions.
            extractedFile = os.path.join(
                temporaryDirectory,
                str(file.getId()) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            # Parse the extracted log and post its artifacts.
            self.chatMeetingLogs(extractedFile, file)
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO,
                         "Failed to remove file " + extractedFile)

    # try:
    #     shutil.rmtree(temporaryDirectory)
    # except:
    #     self.log(Level.INFO, "removal of temporary directory failed " + temporaryDirectory)

    # After all files, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "CentralRing",
        " CentralRing Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Create a TSK_CKC_ATTACHMENTS_OPENED artifact (with created and
    modified dates) for every Outlook temp file that has an extension."""
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%",
                                  "Users/%/AppData/%/Microsoft/Outlook/")

    # Create the custom artifact/attribute types; creation throws when
    # they already exist, which we treat as success.
    try:
        skCase.addArtifactType("TSK_CKC_ATTACHMENTS_OPENED",
                               "CKC Delivery File Attachments Opened")
    except:
        self.log(Level.INFO,
                 "TSK_CKC_ATTACHMENTS_OPENED artifact already exists")
    try:
        attID_dateCreated = skCase.addArtifactAttributeType(
            "TSK_CKC_DATE_CREATED",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "Date Created")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Event Log File Name. ==> ")
    try:
        attID_dateModified = skCase.addArtifactAttributeType(
            "TSK_CKC_DATE_MODIFIED",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME,
            "Date Modified")
    except:
        self.log(Level.INFO,
                 "Attributes Creation Error, Event Log File Name. ==> ")

    # Re-fetch the (possibly pre-existing) type handles.
    artId = skCase.getArtifactTypeID("TSK_CKC_ATTACHMENTS_OPENED")
    attID_dateCreated = skCase.getAttributeType("TSK_CKC_DATE_CREATED")
    attID_dateModified = skCase.getAttributeType("TSK_CKC_DATE_MODIFIED")

    for file in files:
        self.log(Level.INFO, str(file.getNameExtension()))
        try:
            # Directories / extension-less entries are skipped.
            if file.getNameExtension() != "":
                art = file.newArtifact(artId)
                dateCreated = file.getCrtimeAsDate()
                dateModified = file.getMtimeAsDate()
                self.log(Level.INFO, str(dateCreated))
                # FIX: the original called art.addAttribute() with no
                # arguments first, which raised TypeError on every file
                # and sent them all to the except branch — no artifact
                # ever received its attributes.
                art.addAttributes(
                    ((BlackboardAttribute(
                        attID_dateCreated,
                        ParseEvtxDbIngestModuleFactory.moduleName,
                        dateCreated)),
                     (BlackboardAttribute(
                         attID_dateModified,
                         ParseEvtxDbIngestModuleFactory.moduleName,
                         dateModified))))
        except:
            self.log(Level.INFO,
                     "Artifact cannot be created. Moved to next.")

    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Build an HTML report for every Report.json produced by the ingest
    module and register them (plus an index page) with the case.

    Expects ingest output under
    <modules output>/AndroidForensics/<fileset>/<app_id>/<app_report>/Report.json.
    """
    logging.info("Starting Report Module")
    progressBar.setIndeterminate(True)
    self.fileManager = Case.getCurrentCase().getServices().getFileManager()

    progressBar.updateStatusLabel("Finding source data")
    self.tempDirectory = os.path.join(
        Case.getCurrentCase().getModulesOutputDirAbsPath(),
        "AndroidForensics")
    # No ingest output means nothing to report on.
    if not os.path.exists(self.tempDirectory):
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Run Ingest Module first!")
        return

    progressBar.updateStatusLabel("Creating report")
    # The report generator reads case metadata from the environment.
    os.environ["CASE_NAME"] = Case.getCurrentCase().getName()
    os.environ["CASE_NUMBER"] = Case.getCurrentCase().getNumber()
    os.environ["EXAMINER"] = Case.getCurrentCase().getExaminer()

    reports = {"reports": []}
    for fileset in os.listdir(self.tempDirectory):
        fileset_path = os.path.join(self.tempDirectory, fileset)
        for app_id in os.listdir(fileset_path):
            app_path = os.path.join(fileset_path, app_id)
            for app_report in os.listdir(app_path):
                report = os.path.join(app_path, app_report, "Report.json")
                if os.path.exists(report):
                    report_content = Utils.read_json(report)
                    report_path = Analyzer.generate_html_report(
                        report_content,
                        os.path.join(app_path, app_report))
                    Case.getCurrentCase().addReport(
                        report_path, "Report", "Forensics Report")
                    reports["reports"].append(
                        Analyzer.generate_report_summary(
                            report_content, app_report, fileset=fileset))

    # FIX: the original tested len(reports), which is always 1 because
    # the dict always holds the "reports" key; test the collected list.
    if len(reports["reports"]) == 0:
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Nothing to report!")
        return

    report_file_path = Analyzer.generate_html_index(reports, baseReportDir)
    Case.getCurrentCase().addReport(report_file_path, "Report",
                                    "Forensics Report")
    progressBar.updateStatusLabel("Done")
    progressBar.complete(ReportStatus.COMPLETE)
def process(self, file):
    """Extract connected-TV names from the Switch EDID cache file and post artifacts."""
    skCase = Case.getCurrentCase().getSleuthkitCase()
    ARTID_NS_TV = skCase.getArtifactTypeID(self.ARTIFACTTYPENAME_NS_TV)
    names = []
    # (Removed a redundant second skCase assignment present in the original.)

    # Skip unallocated/unused blocks and anything that is not a regular file.
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() is False)):
        return IngestModule.ProcessResult.OK

    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # The Switch stores display EDID data in this system save file.
    if file.getName() == "80000000000000d1":
        artifactList = file.getArtifacts(ARTID_NS_TV)
        self.log(Level.INFO, "Found the file" + file.getName())
        self.filesFound += 1

        # Read the whole file, harvesting TV names chunk by chunk.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(2048, "b")
        totLen = 0
        lengthofbuffer = inputStream.read(buffer)
        while lengthofbuffer != -1:
            totLen = totLen + lengthofbuffer
            lengthofbuffer = inputStream.read(buffer)
            currentBuffer = buffer.tostring()
            # TV names sit between the EdidBlock header and the
            # EdidExtensionBlock marker in the repr()'d bytes.
            names = names + re.findall("EdidBlock.*?\\\\xfc\\\\x00(.*?)\\\\n.*?EdidExtensionBlock", repr(currentBuffer))

        noduplicatesnames = list(set(names))
        for tvname in noduplicatesnames:
            # BUGFIX: the original returned from the entire method on the
            # first duplicate, skipping all remaining TV names and the final
            # module-data event. Skip only the duplicated name instead.
            alreadyPosted = False
            for artifact in artifactList:
                artifactName = artifact.getAttribute(self.NS_DISPLAY_ATTRIBUTES["Name"][3])
                if artifactName.getValueString() == tvname:
                    alreadyPosted = True
                    break
            if alreadyPosted:
                continue

            art = file.newArtifact(ARTID_NS_TV)
            art.addAttribute(BlackboardAttribute(
                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                ConnectedDisplayIngestModuleFactory.moduleName,
                "Nintendo Switch - Connected TV"))
            for attribute in self.NS_DISPLAY_ATTRIBUTES.keys():
                art.addAttribute(BlackboardAttribute(
                    self.NS_DISPLAY_ATTRIBUTES[attribute][3],
                    ConnectedDisplayIngestModuleFactory.moduleName,
                    str(tvname)))
            try:
                # index the artifact for keyword search
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException:
                self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Tell listeners that new artifacts of this type may exist.
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(
            ConnectedDisplayIngestModuleFactory.moduleName,
            skCase.getArtifactType(self.ARTIFACTTYPENAME_NS_TV), None))

    return IngestModule.ProcessResult.OK
def customizeComponents(self):
    """Populate the settings panel from saved module settings and the current case."""
    # Restore the persisted connection settings into their text fields.
    for field, key in ((self.IP_Address_TF, 'ipAddress'),
                       (self.Port_Number_TF, 'portNumber'),
                       (self.userName_TF, 'userName'),
                       (self.password_TF, 'password')):
        field.setText(self.local_settings.getSetting(key))

    # Seed the sketch name/description from case metadata, then persist them.
    currentCase = Case.getCurrentCase()
    self.sketchName_TF.setText(currentCase.getNumber())
    self.sketchDescription_TF.setText(currentCase.getName())
    self.local_settings.setSetting('sketchName', self.sketchName_TF.getText())
    self.local_settings.setSetting('sketchDescription', self.sketchDescription_TF.getText())
def startUp(self, context):
    """Prepare the export directory tree for copied system files.

    Builds <cases root>\\System_files\\<case number> alongside the case
    directory and remembers it in self.exportDirectoryGlobal.
    """
    self.filesFound = 0

    # System files we want to copy out of the image.
    self.listOfFileNames = [
        'pagefile.sys', 'swapfile.sys', 'SAM', 'SECURITY', 'SOFTWARE', 'SYSTEM'
    ]

    exportDirectory = Case.getCurrentCase().getExportDirectory()
    caseName = Case.getCurrentCase().getName()
    number = Case.getCurrentCase().getNumber()

    # Strip the Autopsy-specific path suffixes to get back to the cases root,
    # e.g. C:\Users\user\Documents\cases\1568795\
    exportDirectory = exportDirectory.replace("\\Autopsy", "")
    exportDirectory = exportDirectory.replace("\\" + str(number), "")
    exportDirectory = exportDirectory.replace("\\Export", "")
    self.log(
        Level.INFO,
        "==> 1) exportDirectory=" + str(exportDirectory) + " number=" +
        str(number) + " caseName=" + str(caseName))
    self._makeDirIfMissing(exportDirectory)

    # <cases root>\System_files
    exportDirectory = os.path.join(exportDirectory, "System_files")
    self.log(
        Level.INFO,
        "==> 2) exportDirectory=" + str(exportDirectory) + " number=" + str(number))
    self._makeDirIfMissing(exportDirectory)

    # <cases root>\System_files\<case number>
    exportDirectory = os.path.join(exportDirectory, number)
    self.log(
        Level.INFO,
        "==> 3) exportDirectory=" + str(exportDirectory) + " number=" + str(number))
    self._makeDirIfMissing(exportDirectory)

    # Remember the final destination for use by process().
    self.exportDirectoryGlobal = exportDirectory

def _makeDirIfMissing(self, path):
    """Create a directory, ignoring OS errors such as 'already exists'."""
    # BUGFIX/idiom: the original used a bare except, which also hid
    # programming errors; only OS-level failures are expected here.
    try:
        os.mkdir(path)
    except OSError:
        pass
def process(self, dataSource, progressBar):
    """Run the bundled img_stat EXE on the data source; attach its output as a report."""
    # Unknown amount of work up front.
    progressBar.switchToIndeterminate()

    # The module wraps a Windows EXE; bail out on other platforms.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Only disk images have paths the EXE can consume.
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    imagePaths = dataSource.getPaths()

    # Report file in the case Reports dir, named after the data source ID.
    reportFile = File(Case.getCurrentCase().getCaseDirectory() +
                      "\\Reports" + "\\img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE with a terminator so ingest cancellation kills the child.
    self.log(Level.INFO, "Running program on data source")
    cmd = ArrayList()
    cmd.add(self.pathToEXE.toString())
    cmd.add(imagePaths[0])
    processBuilder = ProcessBuilder(cmd)
    processBuilder.redirectOutput(reportFile)
    ExecUtil.execute(processBuilder, DataSourceIngestModuleProcessTerminator(self.context))

    # Attach the report only if the ingest ran to completion; otherwise
    # remove the partial output file.
    if not self.context.dataSourceIngestIsCancelled():
        Case.getCurrentCase().addReport(reportFile.toString(), "Run EXE", "img_stat output")
    else:
        if reportFile.exists():
            if not reportFile.delete():
                # BUGFIX: was LEVEL.warning — LEVEL is undefined (NameError);
                # use the real java.util.logging constant.
                self.log(Level.WARNING, "Error deleting the incomplete report file")
    return IngestModule.ProcessResult.OK
def add_Volatility_Dump_dir(self, dataSource, dir_abstract_file_info, dump_dir, dir_name, local_dir):
    """Register dump_dir/dir_name as a derived file and return its AbstractFile.

    Adds the directory as a derived file under dir_abstract_file_info (unless
    one with the same name already exists at that parent path), fires a
    module-content event, then looks the file back up and returns the
    AbstractFile whose localPath matches local_dir/dir_name — or the first
    match when none of the localPaths line up.
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    self.log(Level.INFO, " dir Name is ==> " + dir_name)
    self.log(
        Level.INFO,
        " abstract parentPath is ==> " + str(dir_abstract_file_info.parentPath))
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)
    dev_file = os.path.join(dump_dir, dir_name)
    local_file = os.path.join(local_dir, dir_name)
    # check_derived_existance returns True when the file DOES exist at that
    # parent path, so the add is skipped for already-registered files.
    if not (self.check_derived_existance(
            dataSource, dir_name, dir_abstract_file_info.parentPath)):
        # addDerivedFile parameters:
        # File Name, Local Path, size, ctime, crtime, atime, mtime, isFile,
        # Parent File, rederive Details, Tool Name, Tool Version,
        # Other Details, Encoding Type
        # NOTE(review): the stray "+" makes the first timestamp literally +0;
        # harmless, but presumably unintentional — confirm.
        derived_file = skCase.addDerivedFile(dir_name, local_file, os.path.getsize(dev_file), + \
            0, 0, 0, 0, True, dir_abstract_file_info, "", "Volatility",
            self.Volatility_Version, "", TskData.EncodingType.NONE)
        # Notify the pipeline/UI that new derived content exists.
        IngestServices.getInstance().fireModuleContentEvent(
            ModuleContentEvent(derived_file))
    else:
        pass
    # Look the derived file back up so we can return the AbstractFile object.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    new_derived_file = fileManager.findFiles(
        dataSource, dir_name, dir_abstract_file_info.parentPath)
    numFiles = len(new_derived_file)
    self.log(Level.INFO, " print number of files is " + str(numFiles))
    for file in new_derived_file:
        self.log(Level.INFO, "File Exists ==> " + str(file))
        self.log(Level.INFO, "Local Directory ==> " + str(file.localPath))
        self.log(Level.INFO, "Local Directory ==> " + local_file)
        if local_file == file.localPath:
            self.log(Level.INFO, "File Exists ==> " + str(file))
            return file
    # Fallback: no localPath matched; return the first hit.
    self.log(Level.INFO, "File Exists2 ==> " + str(new_derived_file[0]))
    return new_derived_file[0]
def process(self, dataSource, progressBar):
    """Execute the img_stat EXE against the disk image and report its output.

    Cancellation is handled via DataSourceIngestModuleProcessTerminator (kills
    the child) plus IngestJobContext.dataSourceIngestIsCancelled (drops the
    partial report instead of attaching it).
    """
    # we don't know how much work there will be
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files.
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    imagePaths = dataSource.getPaths()

    # Output goes to the Reports folder, named after the data source ID.
    reportFile = File(Case.getCurrentCase().getCaseDirectory() +
                      "\\Reports" + "\\img_stat-" + str(dataSource.getId()) + ".txt")

    self.log(Level.INFO, "Running program on data source")
    cmd = ArrayList()
    cmd.add(self.pathToEXE.toString())
    cmd.add(imagePaths[0])
    processBuilder = ProcessBuilder(cmd)
    processBuilder.redirectOutput(reportFile)
    ExecUtil.execute(processBuilder, DataSourceIngestModuleProcessTerminator(self.context))

    # Add the report to the case tree only when the ingest finished.
    if not self.context.dataSourceIngestIsCancelled():
        Case.getCurrentCase().addReport(reportFile.toString(), "Run EXE", "img_stat output")
    else:
        if reportFile.exists():
            if not reportFile.delete():
                # BUGFIX: original wrote LEVEL.warning — LEVEL is an
                # undefined name and raised NameError on this path.
                self.log(Level.WARNING, "Error deleting the incomplete report file")
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run YARA over non-known %.doc files, collecting all output in one report."""
    progressBar.switchToIndeterminate()

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.doc", "%")

    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(),
                              "Reports", "YARA.txt")
    reportHandle = open(reportPath, 'w')
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Skip NSRL-known files; only scan unknowns.
            if (str(file.getKnown()) != "KNOWN"):
                # Extract the file so the external scanner can read it.
                exportPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                          str(file.getId()) + "." + file.getNameExtension())
                ContentUtils.writeToFile(file, File(exportPath))
                # BUGFIX: write the per-file header and flush BEFORE the child
                # process writes to the same file descriptor. The original
                # wrote the header afterwards through Python's buffer, so the
                # buffered header could land after (or interleave with) the
                # YARA output it was meant to label.
                reportHandle.write(file.getParentPath() + file.getName() + '\n\n')
                reportHandle.flush()
                subprocess.Popen([self.path_to_exe, self.path_to_rules, exportPath],
                                 stdout=reportHandle).communicate()[0]

            self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)
    finally:
        # BUGFIX: ensure the report handle closes even on cancellation/errors.
        reportHandle.close()

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "YARA Scan", "Scanned %d Files" % numFiles)
    IngestServices.getInstance().postMessage(message)

    Case.getCurrentCase().addReport(reportPath, "YARA Scan", "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def add_Volatility_Dump_file(self, dataSource, dir_abstract_file_info, dump_dir, local_dir, pid_name):
    """Register every file found in dump_dir as a derived file under the parent.

    Walks only the top level of dump_dir (next(os.walk(...))[2] yields the
    regular files in that directory), and adds each one that is not already
    present as a derived file at dir_abstract_file_info.parentPath, firing a
    module-content event per addition. pid_name is unused in this method.
    """
    self.log(Level.INFO, "Adding Files from Dump Directory")
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)
    self.log(Level.INFO, "Parent Path is ==> " + str(dir_abstract_file_info))

    #skCase = Case.getCurrentCase().getSleuthkitCase()
    # NOTE(review): despite the name, skCase here is actually the FileManager,
    # and addDerivedFile below is invoked on it — confirm FileManager exposes
    # the same addDerivedFile signature used elsewhere on SleuthkitCase.
    skCase = Case.getCurrentCase().getServices().getFileManager()
    files = next(os.walk(dump_dir))[2]
    for file in files:
        self.log(Level.INFO, " File Name is ==> " + file)
        dev_file = os.path.join(dump_dir, file)
        local_file = os.path.join(local_dir, file)
        self.log(Level.INFO, " Dev File Name is ==> " + dev_file)
        self.log(Level.INFO, " Local File Name is ==> " + local_file)
        if not(self.check_derived_existance(dataSource, file, dir_abstract_file_info.parentPath)):
            # addDerivedFile parameters:
            # File Name, Local Path, size, ctime, crtime, atime, mtime, isFile,
            # Parent File, rederive Details, Tool Name, Tool Version,
            # Other Details, Encoding Type
            derived_file = skCase.addDerivedFile(file, local_file, os.path.getsize(dev_file), + \
                0, 0, 0, 0, True, dir_abstract_file_info, "", "Volatility",
                self.Volatility_Version, "", TskData.EncodingType.NONE)
            # Notify the pipeline/UI that new derived content exists.
            IngestServices.getInstance().fireModuleContentEvent(ModuleContentEvent(derived_file))
        else:
            pass
def shutDown(self):
    """Write the accumulated, de-duplicated MD5 list to GoldBuild.txt in the case dir."""
    # NOTE(review): `md5` is not defined in this method — presumably a
    # module-level list filled during process(); confirm against the module.
    noDupes = list(set(md5))
    outPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "GoldBuild.txt")
    # BUGFIX: use a with-statement so the handle is closed even if a write
    # fails (the original leaked the handle on any exception).
    with open(outPath, 'w') as outFile:
        for line in noDupes:
            outFile.write(line + '\n')
def loadConfig(self):
    """Load flags and the wanted folder path from the module's config.json, if present."""
    config_file = Case.getCurrentCase().getModuleDirectory() + "\\config.json"
    # Guard clause: nothing to load when the config file is absent.
    if not os.path.exists(config_file):
        return
    with open(config_file, "r") as handle:
        settings = json.load(handle)
    self.flags = settings['flags']
    self.paths['1'] = settings['wanted_folder']
def createAttribute(self, attributeName, attributeType, attributeDescription):
    """Create (or fetch) a custom blackboard attribute type.

    attributeType is one of "string", "datetime", "integer", "long",
    "double" or "byte"; anything else falls back to STRING, matching the
    original behavior. Returns the attribute type for attributeName whether
    it was newly created or already existed.
    """
    skCase = Case.getCurrentCase().getSleuthkitCase()
    # Idiom: replace the six-way copy-paste if/elif chain with a dispatch
    # map from the textual type to the TSK value-type enum.
    valueTypes = BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE
    typeMap = {
        "string": valueTypes.STRING,
        "datetime": valueTypes.DATETIME,
        "integer": valueTypes.INTEGER,
        "long": valueTypes.LONG,
        "double": valueTypes.DOUBLE,
        "byte": valueTypes.BYTE,
    }
    valueType = typeMap.get(attributeType, valueTypes.STRING)
    try:
        skCase.addArtifactAttributeType(attributeName, valueType, attributeDescription)
    except:
        # Bare except kept deliberately: under Jython the failure is a Java
        # exception (e.g. the type already exists) that `except Exception`
        # would not reliably catch. Either way we fall through to the lookup.
        self.log(Level.INFO, "Attributes Creation Error ==> " + str(attributeName) +
                 " <<>> " + str(attributeType) + " <<>> " + str(attributeDescription))
    return skCase.getAttributeType(attributeName)
def indexArtifact(self, artifact):
    """Index the given artifact for keyword search; indexing failures are non-fatal."""
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    try:
        # BUGFIX: the original passed the undefined name `artChat`, which
        # raised NameError (silently swallowed below) so nothing was ever
        # indexed. Index the method's `artifact` parameter instead.
        blackboard.indexArtifact(artifact)
    except:
        # Best-effort: keyword-search indexing is optional.
        pass
def process(self, dataSource, progressBar):
    """Sample data-source ingest: flag files whose name contains 'test'.

    Finds matching files, posts a TSK_INTERESTING_FILE_HIT artifact for each,
    reads each file to count its bytes, and posts an ingest inbox message.
    """
    # Bail out immediately if the user already cancelled the job.
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    logger = Logger.getLogger(SampleJythonDataSourceIngestModuleFactory.moduleName)

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    services = Services(sleuthkitCase)
    fileManager = services.getFileManager()

    # For our example, we will use FileManager to get all
    # files with the word "test"
    # in the name and then count and read them
    files = fileManager.findFiles(dataSource, "%test%")
    numFiles = len(files)
    logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process", "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process", "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of
        # artifact. Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                  SampleJythonDataSourceIngestModuleFactory.moduleName, "Test file")
        art.addAttribute(att)

        # To further the example, this code will read the contents of the file
        # and count the number of bytes; read() returns -1 at end-of-stream.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        readLen = inputStream.read(buffer)
        while (readLen != -1):
            totLen = totLen + readLen
            readLen = inputStream.read(buffer)

        # Update the progress bar
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module", "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK;
def __findGeoLocationsInDB(self, databasePath, abstractFile):
    """Parse CachedPosition rows from the SQLite DB and post GPS trackpoint artifacts.

    databasePath: path to a local copy of the browser location database.
    abstractFile: the AbstractFile the DB was extracted from (artifact parent).
    """
    if not databasePath:
        return

    try:
        Class.forName("org.sqlite.JDBC")  # load JDBC driver
        connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
        statement = connection.createStatement()
    except (ClassNotFoundException) as ex:
        self._logger.log(Level.SEVERE, "Error loading JDBC driver", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
        return
    except (SQLException) as ex:
        # Error connecting to SQL databse.
        return

    resultSet = None
    try:
        resultSet = statement.executeQuery("SELECT timestamp, latitude, longitude, accuracy FROM CachedPosition;")
        while resultSet.next():
            # timestamp is stored in milliseconds; convert to epoch seconds.
            timestamp = Long.valueOf(resultSet.getString("timestamp")) / 1000
            latitude = Double.valueOf(resultSet.getString("latitude"))
            longitude = Double.valueOf(resultSet.getString("longitude"))

            # One TSK_GPS_TRACKPOINT artifact per row, carrying lat/long,
            # timestamp and a program-name label.
            attributes = ArrayList()
            artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
            attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME, "Browser Location History"))
            # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(),moduleName, accuracy))  # NOTE: originally commented out
            artifact.addAttributes(attributes);
            try:
                # index the artifact for keyword search
                blackboard = Case.getCurrentCase().getServices().getBlackboard()
                blackboard.indexArtifact(artifact)
            except Blackboard.BlackboardException as ex:
                self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
                MessageNotifyUtil.Notify.error("Failed to index GPS trackpoint artifact for keyword search.", artifact.getDisplayName())

    except SQLException as ex:
        # Unable to execute browser location SQL query against database.
        pass
    except Exception as ex:
        self._logger.log(Level.SEVERE, "Error putting artifacts to blackboard", ex)
        self._logger.log(Level.SEVERE, traceback.format_exc())
    finally:
        # Best-effort cleanup of the JDBC resources.
        try:
            if resultSet is not None:
                resultSet.close()
            statement.close()
            connection.close()
        except Exception as ex:
            # Error closing database.
            pass
def analyze(self, dataSource, fileManager, context):
    """Find WordsFramework databases on the data source and parse WWF messages."""
    global wwfAccountType
    try:
        # Register (or fetch) the Words with Friends account type once.
        wwfAccountType = Case.getCurrentCase().getSleuthkitCase() \
            .getCommunicationsManager().addAccountType("WWF", "Words with Friends")
        for abstractFile in fileManager.findFiles(dataSource, "WordsFramework"):
            try:
                # Copy the DB out of the image so SQLite can open it.
                tempFile = File(Case.getCurrentCase().getTempDirectory(),
                                str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, tempFile, context.dataSourceIngestIsCancelled)
                self.__findWWFMessagesInDB(tempFile.toString(), abstractFile, dataSource)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding WWF messages.
        pass
def get_artifacts(self):
    """Populate self.artifact_list with the distinct artifact type names in the case."""
    sql_statement = "select distinct(type_name) 'type_name' from blackboard_artifacts a, blackboard_artifact_types b " + \
                    " where a.artifact_type_id = b.artifact_type_id;"
    skCase = Case.getCurrentCase().getSleuthkitCase()
    dbquery = skCase.executeQuery(sql_statement)
    try:
        resultSet = dbquery.getResultSet()
        while resultSet.next():
            self.artifact_list.append(resultSet.getString("type_name"))
    finally:
        # BUGFIX: close the query even when result iteration raises
        # (the original leaked the query on any exception).
        dbquery.close()
def check_dervived_existance(self, dataSource, file_name, parent_file_abstract):
    """Return True when NO derived file named file_name exists under the parent.

    NOTE: despite the (misspelled) name, True means "not found" — callers
    depend on this inverted sense, so it is preserved exactly.
    """
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    matches = fileManager.findFiles(dataSource, file_name, parent_file_abstract)
    return len(matches) == 0
def createArtifact(self, artifactName, artifactDescription):
    """Create the artifact type if needed and return its type ID."""
    skCase = Case.getCurrentCase().getSleuthkitCase()
    try:
        # Creation raises when the type already exists; either way the
        # ID lookup below succeeds.
        skCase.addArtifactType(artifactName, artifactDescription)
    except:
        pass
    return skCase.getArtifactTypeID(artifactName)
def add_Volatility_Dump_dir(self, dataSource, dir_abstract_file_info, dump_dir, dir_name, local_dir):
    """Register dump_dir/dir_name as a derived file and return its AbstractFile."""
    skCase = Case.getCurrentCase().getSleuthkitCase()
    self.log(Level.INFO, " dir Name is ==> " + dir_name)
    self.log(Level.INFO, " abstract parentPath is ==> " + str(dir_abstract_file_info.parentPath))
    self.log(Level.INFO, "Dump Dir is ==> " + dump_dir)
    self.log(Level.INFO, "Local Directory is ==> " + local_dir)

    dev_file = os.path.join(dump_dir, dir_name)
    local_file = os.path.join(local_dir, dir_name)

    # Only add the derived file when nothing with this name exists yet at
    # the parent path.
    if not self.check_derived_existance(dataSource, dir_name, dir_abstract_file_info.parentPath):
        # addDerivedFile args: name, localPath, size, ctime, crtime, atime,
        # mtime, isFile, parent, rederiveDetails, toolName, toolVersion,
        # otherDetails, encodingType
        derived_file = skCase.addDerivedFile(
            dir_name, local_file, os.path.getsize(dev_file),
            0, 0, 0, 0, True, dir_abstract_file_info, "",
            "Volatility", self.Volatility_Version, "", TskData.EncodingType.NONE)
        # Let the pipeline/UI know new derived content exists.
        IngestServices.getInstance().fireModuleContentEvent(ModuleContentEvent(derived_file))

    # Look the derived file back up so we can return the AbstractFile object.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    new_derived_file = fileManager.findFiles(dataSource, dir_name, dir_abstract_file_info.parentPath)
    self.log(Level.INFO, " print number of files is " + str(len(new_derived_file)))
    for candidate in new_derived_file:
        self.log(Level.INFO, "File Exists ==> " + str(candidate))
        self.log(Level.INFO, "Local Directory ==> " + str(candidate.localPath))
        self.log(Level.INFO, "Local Directory ==> " + local_file)
        if local_file == candidate.localPath:
            self.log(Level.INFO, "File Exists ==> " + str(candidate))
            return candidate
    # Fallback: no localPath matched; return the first hit.
    self.log(Level.INFO, "File Exists2 ==> " + str(new_derived_file[0]))
    return new_derived_file[0]
def find_tags(self):
    """Populate self.tag_list with the distinct content tag display names in the case."""
    sql_statement = "SELECT distinct(display_name) u_tag_name FROM content_tags INNER JOIN tag_names ON " + \
                    " content_tags.tag_name_id = tag_names.tag_name_id;"
    skCase = Case.getCurrentCase().getSleuthkitCase()
    dbquery = skCase.executeQuery(sql_statement)
    try:
        resultSet = dbquery.getResultSet()
        while resultSet.next():
            self.tag_list.append(resultSet.getString("u_tag_name"))
    finally:
        # BUGFIX: close the query even when result iteration raises,
        # mirroring the fix in get_artifacts().
        dbquery.close()
def process(self, dataSource, progressBar):
    """Run the img_stat EXE on the disk image and attach its output as a case report."""
    # we don't know how much work there will be
    progressBar.switchToIndeterminate()

    # Example has only a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Verify we have a disk image and not a folder of files.
    if not isinstance(dataSource, Image):
        self.log(Level.INFO, "Ignoring data source. Not an image")
        return IngestModule.ProcessResult.OK

    imagePaths = dataSource.getPaths()

    # Output file in the Reports folder, named after the data source ID.
    reportFile = File(Case.getCurrentCase().getCaseDirectory() +
                      "\\Reports" + "\\img_stat-" + str(dataSource.getId()) + ".txt")

    # Run the EXE; the terminator kills the child if ingest is cancelled.
    self.log(Level.INFO, "Running program on data source")
    cmd = ArrayList()
    cmd.add(self.pathToEXE.toString())
    cmd.add(imagePaths[0])
    processBuilder = ProcessBuilder(cmd)
    processBuilder.redirectOutput(reportFile)
    ExecUtil.execute(processBuilder, DataSourceIngestModuleProcessTerminator(self.context))

    # Attach the report only when the ingest finished; otherwise delete
    # the partial file.
    if not self.context.dataSourceIngestIsCancelled():
        Case.getCurrentCase().addReport(reportFile.toString(), "Run EXE", "img_stat output")
    else:
        if reportFile.exists():
            if not reportFile.delete():
                # BUGFIX: was LEVEL.warning — an undefined name that raised
                # NameError; use java.util.logging's Level.WARNING.
                self.log(Level.WARNING, "Error deleting the incomplete report file")
    return IngestModule.ProcessResult.OK
def process(self, file):
    """Sample file ingest: flag .txt files, then demo blackboard queries and file reads."""
    # Skip non-files (unallocated/unused blocks, directories, etc.).
    if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
        (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
        (file.isFile() == False)):
        return IngestModule.ProcessResult.OK

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # For an example, we will flag files with .txt in the name and make a blackboard artifact.
    if file.getName().lower().endswith(".txt"):
        self.log(Level.INFO, "Found a text file: " + file.getName())
        self.filesFound += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of
        # artifact. Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonFileIngestModuleFactory.moduleName, "Text Files")
        art.addAttribute(att)

        try:
            # index the artifact for keyword search
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there is a new artifact
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(SampleJythonFileIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))

        # Example: query artifacts added by other modules and log their attributes.
        artifactList = file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        for artifact in artifactList:
            attributeList = artifact.getAttributes()
            for attrib in attributeList:
                self.log(Level.INFO, attrib.toString())

        # Read the file in chunks and count its bytes; read() returns -1 at EOF.
        inputStream = ReadContentInputStream(file)
        buffer = jarray.zeros(1024, "b")
        totLen = 0
        # BUGFIX/idiom: renamed the loop variable from `len`, which shadowed
        # the builtin len() for the rest of the function.
        bytesRead = inputStream.read(buffer)
        while (bytesRead != -1):
            totLen = totLen + bytesRead
            bytesRead = inputStream.read(buffer)

    return IngestModule.ProcessResult.OK
def generateReport(self, baseReportDir, progressBar):
    """Report the number of files created within the last two weeks."""
    # Two units of progress: counting the files, then writing the report.
    progressBar.setIndeterminate(False)
    progressBar.start()
    progressBar.setMaximumProgress(2)

    # Epoch-seconds timestamp for "two weeks ago".
    twoWeeksInSeconds = 14 * 24 * 60 * 60  # days * hours * minutes * seconds
    minTime = System.currentTimeMillis() / 1000 - twoWeeksInSeconds

    # Query the case database for files created after that cutoff.
    sleuthkitCase = Case.getCurrentCase().getSleuthkitCase()
    recentFiles = sleuthkitCase.findAllFilesWhere("crtime > %d" % minTime)
    fileCount = len(recentFiles)
    # Step #1 (counting) done.
    progressBar.increment()

    # Write the count to the report file and register it with the case.
    reportPath = os.path.join(baseReportDir, self.getRelativeFilePath())
    with open(reportPath, 'w') as report:
        report.write("file count = %d" % fileCount)
    Case.getCurrentCase().addReport(reportPath, self.moduleName, "File Count Report")
    progressBar.increment()

    # Would be ReportStatus.ERROR if the report could not be generated.
    progressBar.complete(ReportStatus.COMPLETE)
def analyze(self, dataSource, fileManager, context):
    """Extract each tc.db (Tango) database found on the data source and parse its messages."""
    try:
        for abstractFile in fileManager.findFiles(dataSource, "tc.db"):
            try:
                # Copy the DB out of the image so SQLite can open it.
                tempFile = File(Case.getCurrentCase().getTempDirectory(),
                                str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, tempFile, context.dataSourceIngestIsCancelled)
                self.__findTangoMessagesInDB(tempFile.toString(), abstractFile, dataSource)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Tango messages.
        pass
def analyze(self, dataSource, fileManager, context):
    """Extract cached geoposition databases and parse browser location history."""
    try:
        for abstractFile in fileManager.findFiles(dataSource, "CachedGeoposition%.db"):
            # Zero-byte files carry no data; skip them.
            if abstractFile.getSize() == 0:
                continue
            try:
                # Copy the DB out of the image so SQLite can open it.
                dbCopy = File(Case.getCurrentCase().getTempDirectory(),
                              str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, dbCopy, context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(dbCopy.toString(), abstractFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing browser location files", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding browser location files.
        pass
def analyze(self, dataSource, fileManager, context):
    """Gather Android call-log databases and parse the calls out of each."""
    try:
        # All three DB names can hold call logs depending on Android version.
        logDbs = fileManager.findFiles(dataSource, "logs.db")
        logDbs.addAll(fileManager.findFiles(dataSource, "contacts.db"))
        logDbs.addAll(fileManager.findFiles(dataSource, "contacts2.db"))
        for abstractFile in logDbs:
            try:
                # Copy the DB out of the image so SQLite can open it.
                dbCopy = File(Case.getCurrentCase().getTempDirectory(),
                              str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, dbCopy, context.dataSourceIngestIsCancelled)
                self.__findCallLogsInDB(dbCopy.toString(), abstractFile, dataSource)
            except IOException as ex:
                self._logger.log(Level.SEVERE, "Error writing temporary call log db to disk", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding call logs.
        pass
def analyze(self, dataSource, fileManager, context):
    """Parse Google Maps destination history databases found on the data source."""
    try:
        historyFiles = fileManager.findFiles(dataSource, "da_destination_history")
        # Guard clause: nothing to do when no history DBs exist.
        if historyFiles.isEmpty():
            return
        for abstractFile in historyFiles:
            try:
                # Copy the DB out of the image so SQLite can open it.
                dbCopy = File(Case.getCurrentCase().getTempDirectory(),
                              str(abstractFile.getId()) + abstractFile.getName())
                ContentUtils.writeToFile(abstractFile, dbCopy, context.dataSourceIngestIsCancelled)
                self.__findGeoLocationsInDB(dbCopy.toString(), abstractFile)
            except Exception as ex:
                self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
                self._logger.log(Level.SEVERE, traceback.format_exc())
    except TskCoreException as ex:
        # Error finding Google map locations.
        pass
def Create_Diskpart_Script(self, size_of_disk, vdisk_name):
    """Write the three diskpart scripts used to manage a preview vdisk.

    Picks the first unused drive letter in D:..Y:, then writes three
    diskpart script files into <case temp>/vdisk_scripts:
      - create_vdisk.txt: create/attach/partition/format the vdisk and
        assign the chosen letter,
      - mount_vdisk.txt: re-attach an existing vdisk,
      - unmount_vdisk.txt: detach it.

    Args:
        size_of_disk: value for diskpart's "maximum=" (diskpart takes
            this in MB — TODO confirm callers pass MB).
        vdisk_name: full path of the vdisk file the scripts operate on.

    Returns:
        (create_script_path, unmount_script_path, mount_script_path,
         open_drive) where open_drive is e.g. "E:".
    """
    # Find the first drive letter D:..Y: not currently in use.
    # BUG FIX: the original only assigned open_drive inside a bare
    # "except" around os.path.exists, which never raises, so open_drive
    # was unbound and the "assign letter=" write below crashed with
    # NameError.  Test the existence result directly instead.
    open_drive = None
    for x in range(68, 90):
        if not os.path.exists(chr(x) + ":"):
            open_drive = chr(x) + ":"
            break

    vdisk_script_dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "vdisk_scripts")
    try:
        os.mkdir(vdisk_script_dir)
    except OSError:
        # Directory already created by an earlier run; reuse it.
        self.log(Level.INFO, "Vdisk script directory already exists")

    # Script file paths.
    vdisk_create_script = os.path.join(vdisk_script_dir, "create_vdisk.txt")
    vdisk_unmount_script = os.path.join(vdisk_script_dir, "unmount_vdisk.txt")
    vdisk_mount_script = os.path.join(vdisk_script_dir, "mount_vdisk.txt")

    # Create, attach, partition and format the vdisk, then assign the
    # free drive letter.  "with" guarantees the handle is closed even
    # if a write fails.
    with open(vdisk_create_script, "w") as vdc:
        vdc.write('create vdisk file="' + vdisk_name + '" maximum=' + str(size_of_disk) + " type=expandable \n")
        vdc.write("attach vdisk \n")
        vdc.write("create partition primary \n")
        vdc.write('format fs=ntfs label="Preview" quick \n')
        vdc.write("assign letter=" + open_drive + " \n")

    # Mount (re-attach) script.
    with open(vdisk_mount_script, "w") as vdc:
        vdc.write('select vdisk file="' + vdisk_name + '"\n')
        vdc.write("attach vdisk \n")

    # Unmount (detach) script.
    with open(vdisk_unmount_script, "w") as vdc:
        vdc.write('select vdisk file="' + vdisk_name + '"\n')
        vdc.write("detach vdisk \n")

    return vdisk_create_script, vdisk_unmount_script, vdisk_mount_script, open_drive
def check_derived_existance(self, dataSource, file_name, parent_file_path):
    """Return True when a file named file_name already exists under
    parent_file_path on dataSource, False otherwise.

    The lookup goes through the case FileManager; every step is logged
    at INFO level so the derived-file decision can be traced later.
    """
    self.log(Level.INFO, "File Name is ==> " + str(file_name) + " <==> Parent File Dir ==> " + str(parent_file_path))
    case_services = Case.getCurrentCase().getServices()
    matches = case_services.getFileManager().findFiles(dataSource, file_name, parent_file_path)

    # No candidates at all: the derived file has not been made yet.
    if len(matches) == 0:
        self.log(Level.INFO, "File Does Not Exists ==> " + str(file_name))
        return False

    # Candidates found by name; accept only one whose parent path
    # matches exactly.
    for candidate in matches:
        self.log(Level.INFO, "File Exists ==> " + str(file_name))
        if parent_file_path == candidate.parentPath:
            self.log(Level.INFO, "File Exists ==> " + str(file_name))
            return True

    self.log(Level.INFO, "File Does Not Exists ==> " + str(file_name))
    return False
def process(self, dataSource, progressBar):
    """Run every phone-data analyzer against the data source.

    Each analyzer runs independently: a failure is recorded and logged
    but does not stop the remaining analyzers.  Always returns OK so
    the rest of the ingest pipeline proceeds.

    Args:
        dataSource: data source Content to analyze.
        progressBar: ingest progress bar, advanced once per analyzer.

    Returns:
        IngestModule.ProcessResult.OK (also on cancellation).
    """
    errors = []
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    analyzers = [contact.ContactAnalyzer(), calllog.CallLogAnalyzer(), textmessage.TextMessageAnalyzer(),
                 tangomessage.TangoMessageAnalyzer(), wwfmessage.WWFMessageAnalyzer(),
                 googlemaplocation.GoogleMapLocationAnalyzer(), browserlocation.BrowserLocationAnalyzer(),
                 cachelocation.CacheLocationAnalyzer()]
    self.log(Level.INFO, "running " + str(len(analyzers)) + " analyzers")
    progressBar.switchToDeterminate(len(analyzers))

    n = 0
    for analyzer in analyzers:
        # Stop promptly if the user cancelled the ingest job.
        if self.context.dataSourceIngestIsCancelled():
            return IngestModule.ProcessResult.OK
        try:
            analyzer.analyze(dataSource, fileManager, self.context)
            n += 1
            progressBar.progress(n)
        except Exception as ex:
            # Record the failure but keep running the other analyzers.
            errors.append("Error running " + analyzer.__class__.__name__)
            self.log(Level.SEVERE, traceback.format_exc())

    # Assemble a summary message.  NOTE(review): this is built but never
    # posted anywhere (matching the original Java); the original also
    # left dead locals behind ("errorMessageSubject" was initialized
    # while the branches assigned "errorMsgSubject", and "msgLevel" was
    # preset outside the branch).  Consolidated to one consistent set of
    # variables so a future change can post it via IngestMessage.
    errorMessage = []
    if errors:
        msgLevel = IngestMessage.MessageType.ERROR
        errorMessage.append("Errors were encountered")
        errorMessage.append("<ul>")  # was missing in the original Java code
        for msg in errors:
            errorMessage.extend(["<li>", msg, "</li>\n"])
        errorMessage.append("</ul>\n")
        if len(errors) == 1:
            errorMsgSubject = "One error was found"
        else:
            errorMsgSubject = "errors found: " + str(len(errors))
    else:
        msgLevel = IngestMessage.MessageType.INFO
        errorMessage.append("No errors")
        errorMsgSubject = "No errors"

    return IngestModule.ProcessResult.OK