def process(self, dataSource, progressBar):
    """Export the MD5 hashes of all case files to a tab-separated hashset file.

    Writes one "<md5>\t<name>" line per file that has a computed MD5 to
    <case export dir>/<image name>_hashset.txt, then posts an ingest inbox
    message. Returns IngestModule.ProcessResult.OK.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    imagePaths = dataSource.getPaths()
    # BUGFIX: a logical file set can have no image paths; indexing [0]
    # unconditionally raised IndexError. Fall back to the data source name.
    if imagePaths:
        imageFile = os.path.basename(imagePaths[0])
    else:
        imageFile = dataSource.getName()
    exportFile = os.path.join(Case.getCurrentCase().getExportDirectory(),
                              str(imageFile) + "_hashset.txt")

    # Only files whose MD5 has actually been computed.
    sql_statement = 'select name, md5 from tsk_files where md5 <> "";'
    skCase = Case.getCurrentCase().getSleuthkitCase()
    dbquery = skCase.executeQuery(sql_statement)
    try:
        resultSet = dbquery.getResultSet()
        with open(exportFile, 'w') as f:
            while resultSet.next():
                f.write(resultSet.getString("md5") + "\t" + resultSet.getString("name") + "\n")
    finally:
        # BUGFIX: always release the case-database query, even if the
        # export-file write raises.
        dbquery.close()

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Create_DS_Hashset",
                                          " Hashset Create For Datasource " + imageFile )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Flag every file with "test" in its name as an interesting file.

    For each match: posts a TSK_INTERESTING_FILE_HIT artifact, indexes it
    for keyword search, and reads the file's content to count its bytes.
    Returns IngestModule.ProcessResult.OK.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Use FileManager to get all files with the word "test" in the name.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%test%")

    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a
        # generic artifact type; see the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  SampleJythonDataSourceIngestModuleFactory.moduleName,
                                  "Test file")
        art.addAttribute(att)

        try:
            # Index the artifact for keyword search.
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Read the contents of the file and count the number of bytes.
        inputStream = ReadContentInputStream(file)
        try:
            buffer = jarray.zeros(1024, "b")
            totLen = 0
            readLen = inputStream.read(buffer)
            while readLen != -1:
                totLen = totLen + readLen
                readLen = inputStream.read(buffer)
        finally:
            # BUGFIX: the stream was never closed, leaking one handle per file.
            inputStream.close()

        # Update the progress bar.
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Flag files named like "test" via TSK_INTERESTING_FILE_HIT artifacts.

    Logger-based variant of the sample module: logs each step through
    java.util.logging, posts one artifact per matching file, and reads each
    file to count its bytes. Returns IngestModule.ProcessResult.OK.
    """
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    logger = Logger.getLogger(SampleJythonDataSourceIngestModuleFactory.moduleName)

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    services = Services(sleuthkitCase)
    fileManager = services.getFileManager()

    # Use FileManager to get all files with the word "test" in the name.
    files = fileManager.findFiles(dataSource, "%test%")

    numFiles = len(files)
    logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process",
                "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        logger.logp(Level.INFO, SampleJythonDataSourceIngestModule.__name__, "process",
                    "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a
        # generic artifact type; see the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                  SampleJythonDataSourceIngestModuleFactory.moduleName,
                                  "Test file")
        art.addAttribute(att)

        # Read the contents of the file and count the number of bytes.
        inputStream = ReadContentInputStream(file)
        try:
            buffer = jarray.zeros(1024, "b")
            totLen = 0
            readLen = inputStream.read(buffer)
            while readLen != -1:
                totLen = totLen + readLen
                readLen = inputStream.read(buffer)
        finally:
            # BUGFIX: the stream was never closed, leaking one handle per file.
            inputStream.close()

        # Update the progress bar.
        progressBar.progress(fileCount)

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Run the configured YARA executable over every non-known *.doc file.

    Each candidate file is exported to the case temp directory, scanned with
    self.path_to_exe / self.path_to_rules, and the scanner's stdout plus the
    file's path are appended to Reports/YARA.txt, which is then registered
    as a case report. Returns IngestModule.ProcessResult.OK.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    ###---EDIT HERE---###
    files = fileManager.findFiles(dataSource, "%.doc", "%")
    ###---EDIT HERE---###
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    ###---EDIT HERE---###
    reportPath = os.path.join(Case.getCurrentCase().getCaseDirectory(), "Reports", "YARA.txt")
    ###---EDIT HERE---###
    reportHandle = open(reportPath, 'w')
    # BUGFIX: the report handle used to leak when the job was cancelled
    # mid-loop (early return before close); close it deterministically.
    try:
        for file in files:
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Skip hashset-known files; only scan unknown content.
            if str(file.getKnown()) != "KNOWN":
                exportPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                          str(file.getId()) + "." + file.getNameExtension())
                ###---EDIT HERE---###
                ContentUtils.writeToFile(file, File(exportPath))
                # The scanner's stdout goes straight into the report file.
                subprocess.Popen([self.path_to_exe, self.path_to_rules, exportPath],
                                 stdout=reportHandle).communicate()[0]
                ###---EDIT HERE---###
                reportHandle.write(file.getParentPath() + file.getName() + '\n\n')

            self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1
            progressBar.progress(fileCount)
    finally:
        reportHandle.close()

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "YARA Scan", "Scanned %d Files" % numFiles)
    IngestServices.getInstance().postMessage(message)
    Case.getCurrentCase().addReport(reportPath, "YARA Scan", "Scanned %d Files" % numFiles)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Skeleton data-source ingest pass: emits trace log lines, then posts a
    completion message to the ingest inbox and reports success."""
    # Trace marker left over from development (log text kept verbatim).
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # Amount of work is unknown, so show an indeterminate progress bar.
    progressBar.switchToIndeterminate()

    # Additional development trace markers, unchanged.
    self.log(Level.INFO, "Starting 2 to process, Just before call to ???????")
    self.log(Level.INFO, "ending process, Just before call to ??????")

    # Tell the user via the ingest inbox that this pass finished.
    inboxMessage = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "GUI_Test", " GUI_Test Has Been Analyzed " )
    IngestServices.getInstance().postMessage(inboxMessage)
    return IngestModule.ProcessResult.OK
def process(self, file):
    """File-level ingest: flag files whose name contains "test".

    For a match, posts a TSK_INTERESTING_FILE_HIT artifact, reads the file
    to total its size, and sends that size to the ingest inbox. Returns
    IngestModule.ProcessResult.OK.
    """
    # NOTE: the original comment claimed a ".txt extension" check; the code
    # actually matches the substring "test" anywhere in the file name.
    if file.getName().find("test") != -1:
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
                                  "Sample Jython File Ingest Module", "Text Files")
        art.addAttribute(att)

        # Read the contents of the file in 1 KB chunks to total its size.
        inputStream = ReadContentInputStream(file)
        try:
            buffer = jarray.zeros(1024, "b")
            totLen = 0
            # BUGFIX: renamed the read counter from 'len', which shadowed the builtin.
            readLen = inputStream.read(buffer)
            while readLen != -1:
                totLen = totLen + readLen
                readLen = inputStream.read(buffer)
        finally:
            # BUGFIX: the stream was never closed.
            inputStream.close()

        # Send the size of the file to the ingest messages in box.
        msgText = "Size of %s is %d bytes" % ((file.getName(), totLen))
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                              "Sample Jython File IngestModule", msgText)
        # postMessage's return value is not useful; the old assignment to
        # 'ingestServices' was misleading and has been dropped.
        IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Count files named like "test" plus files created in the last two weeks.

    Two-task pass: task 1 counts name matches, task 2 counts recently created
    files; the combined total is posted to the ingest inbox. Returns
    IngestModule.ProcessResult.OK.
    """
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Configure progress bar for 2 tasks.
    progressBar.switchToDeterminate(2)

    autopsyCase = Case.getCurrentCase()
    sleuthkitCase = autopsyCase.getSleuthkitCase()
    services = Services(sleuthkitCase)
    fileManager = services.getFileManager()

    # Task 1: count files with "test" in the name.
    # (IDIOM: len() replaces the manual one-per-iteration counting loop.)
    files = fileManager.findFiles(dataSource, "%test%")
    fileCount = len(files)
    progressBar.progress(1)

    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Task 2: count files created within the last two weeks.
    currentTime = System.currentTimeMillis() / 1000
    minTime = currentTime - (14 * 24 * 60 * 60)  # go back two weeks
    otherFiles = sleuthkitCase.findAllFilesWhere("crtime > %d" % minTime)
    fileCount += len(otherFiles)
    progressBar.progress(1)

    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Sample Jython Data Source Ingest Module",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse TeraCopy main.db databases and post TSK_TERACOPY_DB artifacts.

    For each TeraCopy main.db found in the data source: extract it to the
    case temp directory, read its 'list' table (one row per copy/move
    operation), locate and extract the per-operation history database, and
    create one TSK_TERACOPY_DB artifact per file row in that history DB.
    Uses java.sql JDBC with the SQLite driver against the extracted copies.
    Returns IngestModule.ProcessResult.OK.
    """
    # we don't know how much work there is yet
    #progressBar.switchToIndeterminate()

    # get current case and the TeraCopy main.db abstract file information
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "main.db", "%TeraCopy%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    moduleName = ProcessTeraCopyDbIngestModuleFactory.moduleName

    # Create TeraCopy directory in temp directory; if it exists then continue on processing.
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "TeraCopy")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass  # directory already exists from a previous run
        #self.log(Level.INFO, "Temporary directory already exists " + temporaryDirectory)

    # Map each main.db's parent path to its object id so the extracted copy
    # below gets a collision-free on-disk name.
    filePathId = {}
    for file in files:
        fileName = file.getName()
        if fileName.endswith(".db"):
            filePathId[file.getParentPath()] = file.getId()

    if numFiles > 0:
        # Register custom string attribute types; "already exists" errors are
        # expected on re-runs and only logged.
        for artifact in self.stringColumns:
            try:
                attID = skCase.addArtifactAttributeType(artifact[0], BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, artifact[1])
            except:
                self.log(Level.INFO, "Attributes Creation Error, " + artifact[0] + " ==> ")
        # Register custom date/time attribute types the same way.
        for artifact in self.dateColumns:
            try:
                attID = skCase.addArtifactAttributeType(artifact[0], BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, artifact[1])
            except:
                self.log(Level.INFO, "Attributes Creation Error, " + artifact[0] + " ==> ")
        # Register the custom artifact type (again, exists-on-rerun is normal).
        try:
            artID_art = skCase.addArtifactType("TSK_TERACOPY_DB", "Teracopy History DB")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, artifact TSK_TERACOPY_DB exists. ==> ")
        artTeraCopyId = skCase.getArtifactTypeID("TSK_TERACOPY_DB")
        artTeraCopy = skCase.getArtifactType("TSK_TERACOPY_DB")
        moduleName = ProcessTeraCopyDbIngestModuleFactory.moduleName

        # Extract each main.db and process it.
        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            fileCount += 1

            # Save the file locally. Use file id as name to reduce collisions.
            fileId = filePathId[file.getParentPath()]
            extractedFile = os.path.join(temporaryDirectory, str(fileId) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            # NOTE(review): username is derived but never used below — confirm
            # whether it was meant to become an attribute.
            userpath = file.getParentPath()
            username = userpath.split('/')

            # Open the extracted copy over JDBC.
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extractedFile)
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedFile + " (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # The 'list' table holds one row per TeraCopy operation; column
            # aliases are chosen to match the custom attribute type names.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("select name TSK_HISTORY_FILE, SOURCE TSK_SOURCE_LOCATION, target TSK_TARGET_LOCATION, " + \
                                              " CASE operation WHEN 1 THEN 'Copy' WHEN 2 THEN 'Move' WHEN 3 THEN 'Test' WHEN 6 THEN " + \
                                              " 'Delete' END TSK_OPERATION_TYPE, strftime('%s', started) TSK_DATETIME_START, " + \
                                              " strftime('%s', finished) TSK_DATETIME_END from list")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for list tables (" + e.getMessage() + ") ")
                return IngestModule.ProcessResult.OK

            while resultSet.next():
                historyFile = resultSet.getString("TSK_HISTORY_FILE")
                # Each operation has its own history database named after this row.
                fileManagerHist = Case.getCurrentCase().getServices().getFileManager()
                histFiles = fileManagerHist.findFiles(dataSource, historyFile + "%", "%TeraCopy%")
                numHistFiles = len(histFiles)
                sourceLocation = resultSet.getString('TSK_SOURCE_LOCATION')
                targetLocation = resultSet.getString('TSK_TARGET_LOCATION')
                operationType = resultSet.getString('TSK_OPERATION_TYPE')
                startTime = resultSet.getInt('TSK_DATETIME_START')
                endTime = resultSet.getInt('TSK_DATETIME_END')

                for histFile in histFiles:
                    # Extract the history DB beside the main.db copy.
                    extractedHistFile = os.path.join(temporaryDirectory, str(histFile.getId()) + "-" + historyFile)
                    ContentUtils.writeToFile(histFile, File(extractedHistFile))
                    try:
                        # Class.forName("org.sqlite.JDBC").newInstance()
                        dbConnHist = DriverManager.getConnection("jdbc:sqlite:%s" % extractedHistFile)
                    except SQLException as e:
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedHistFile + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK

                    # 'Files' rows describe each file touched by the operation;
                    # aliases again mirror the custom attribute names.
                    try:
                        stmtHist = dbConnHist.createStatement()
                        resultSetHist = stmtHist.executeQuery("SELECT SOURCE TSK_FILE_PATH, CASE State WHEN 0 THEN 'Added' " + \
                                                              " WHEN 1 THEN 'OK' WHEN 2 THEN 'Verified' " + \
                                                              " WHEN 3 THEN 'Error' WHEN 4 THEN 'Skipped' WHEN 5 THEN 'Deleted' " + \
                                                              " WHEN 6 THEN 'Moved' END TSK_OPERATION_STATE, SIZE TSK_FILE_SIZE, " + \
                                                              " Attributes TSK_ATTRIBUTES, CASE IsFolder WHEN 0 THEN '' WHEN 1 THEN 'Yes' " + \
                                                              " END TSK_ISFOLDER, strftime('%s', Creation) TSK_DATETIME_CREATED, " + \
                                                              " strftime('%s', Access) TSK_DATETIME_ACCESSED, " + \
                                                              " strftime('%s', Write) TSK_DATETIME_MODIFIED, " + \
                                                              " SourceCRC TSK_SOURCE_CRC, TargetCRC TSK_TARGET_CRC, Message TSK_MESSAGE " + \
                                                              " FROM Files ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error querying database for list tables (" + e.getMessage() + ") ")
                        return IngestModule.ProcessResult.OK

                    # Capture the result-set column labels once for the per-row loop.
                    meta = resultSetHist.getMetaData()
                    columnCount = meta.getColumnCount()
                    columnNames = []
                    for x in range (1, columnCount + 1):
                        columnNames.append(meta.getColumnLabel(x))

                    # One artifact per file row in the history DB.
                    while resultSetHist.next():
                        try:
                            artifact = file.newArtifact(artTeraCopyId)
                            attributes = ArrayList()
                            # Operation-level attributes from the main.db row.
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_HISTORY_FILE'), moduleName, historyFile))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_SOURCE_LOCATION'), moduleName, sourceLocation))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_TARGET_LOCATION'), moduleName, targetLocation))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_OPERATION_TYPE'), moduleName, operationType))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_DATETIME_START'), moduleName, startTime))
                            attributes.add(BlackboardAttribute(skCase.getAttributeType('TSK_DATETIME_END'), moduleName, endTime))
                            # Per-file attributes: ints for date columns, strings otherwise.
                            for x in range(0, columnCount):
                                # NOTE(review): this checks self.dateColumn (singular) while the
                                # registration loops above use self.dateColumns — confirm both exist.
                                if columnNames[x] in self.dateColumn:
                                    attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSetHist.getInt(columnNames[x])))
                                else:
                                    attributes.add(BlackboardAttribute(skCase.getAttributeType(columnNames[x]), moduleName, resultSetHist.getString(columnNames[x])))
                            artifact.addAttributes(attributes)
                            # index the artifact for keyword search
                            # NOTE(review): 'blackboard' is not defined anywhere in this
                            # method; the resulting NameError is swallowed by the bare
                            # except, so artifacts are never indexed — confirm intent.
                            try:
                                blackboard.indexArtifact(artifact)
                            except:
                                pass
                        except SQLException as e:
                            self.log(Level.INFO, "Error getting values from files table (" + e.getMessage() + ")")

                    # Close the history database statement and connection.
                    try:
                        stmtHist.close()
                        dbConnHist.close()
                    except:
                        pass

            # Close the main.db statement and connection.
            try:
                stmt.close()
                dbConn.close()
            except:
                pass

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "TeraCopy DB", " TeraCopy DB Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Export case artifacts and file times to JSONL and submit them to Timesketch.

    Builds <temp>/Timesketch/Autopsy.jsonl from (1) every blackboard artifact
    that has a date/time attribute (value_type = 5) and (2) the ctime/crtime/
    atime/mtime values of every row in tsk_files, then invokes the external
    Timesketch submitter executable (up to 3 attempts) and posts its outcome
    to the ingest inbox. Returns IngestModule.ProcessResult.OK.
    """
    # NOTE(review): this log text looks copy-pasted from a Safari-history
    # module — the method never calls parse_safari_history.
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # Setup SQL Statements and other variables.
    # artifactSQL: artifact type/display name plus owning data source for one
    # artifact id (the literal "xxx:" columns become JSON key names below).
    artifactSQL = 'select distinct "artifact_type_name:" a1, art_type.type_name a2, "artifact_display_name:" b1, art_type.display_name b2, ' + \
                  ' "datasource_obj_id:" c1, img_name.obj_id c2, "datasource_name:" d1, img_name.name d2, art_type.type_name e1 from blackboard_artifact_types art_type, ' + \
                  ' tsk_image_names img_name, blackboard_artifacts art, blackboard_attributes att where img_name.obj_id = art.data_source_obj_id ' + \
                  ' and img_name.sequence = 0 and art.artifact_type_id = art_type.artifact_type_id and att.artifact_id = art.artifact_id ' + \
                  ' and att.artifact_id = '
    # artifactSQL2: the date/time attributes (value_type = 5) of one artifact.
    artifactSQL2 = 'select att_type.display_name date_type, case att.value_type when 0 then value_text when 1 then value_int32 when 2 then value_int64 ' +\
                   ' when 3 then value_double when 4 then value_byte when 5 then value_int64 end date_value from blackboard_attributes att, ' + \
                   ' blackboard_attribute_types att_type where att_type.attribute_type_id = att.attribute_type_id and att.value_type = 5 ' + \
                   ' and att.artifact_id = '
    # artifactSQL3: all non-date attributes of one artifact.
    artifactSQL3 = 'select att_type.display_name name, case att.value_type when 0 then value_text when 1 then value_int32 when 2 then value_int64 ' +\
                   ' when 3 then value_double when 4 then value_byte when 5 then value_int64 end value from blackboard_attributes att, ' + \
                   ' blackboard_attribute_types att_type where att_type.attribute_type_id = att.attribute_type_id and att.value_type <> 5 ' + \
                   ' and att.artifact_id = '
    sketchName = self.sketchName
    sketchDescription = self.sketchDescription
    timelineName = sketchName + "_Timeline"
    timelineIndex = sketchName + "_Index"
    jsonFileName = "Autopsy.jsonl"
    skCase = Case.getCurrentCase().getSleuthkitCase()

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Create Timesketch directory in temp directory; if it exists then continue on processing.
    tempDirectory = Case.getCurrentCase().getTempDirectory()
    tempDir = os.path.join(tempDirectory, "Timesketch")
    self.log(Level.INFO, "create Directory " + tempDir)
    try:
        os.mkdir(tempDir)
    except:
        self.log(Level.INFO, "Timesketch directory already exists" + tempDir)

    # Pass 1: one JSON record per artifact that has at least one date attribute.
    artList = []
    dbquery = skCase.executeQuery("select distinct artifact_id from blackboard_attributes where value_type = 5;")
    resultSet = dbquery.getResultSet()
    while resultSet.next():
        artifactDict = {}
        dbquery2 = skCase.executeQuery(artifactSQL + resultSet.getString("artifact_id"))
        resultSet2 = dbquery2.getResultSet()
        while resultSet2.next():
            # The "a1"/"b1"/... columns are literal key names; "a2"/"b2"/... are values.
            artifactDict[resultSet2.getString("a1")] = resultSet2.getString("a2")
            artifactDict[resultSet2.getString("b1")] = resultSet2.getString("b2")
            artifactDict[resultSet2.getString("c1")] = resultSet2.getString("c2")
            artifactDict[resultSet2.getString("d1")] = resultSet2.getString("d2")
            # Date attributes feed the Timesketch-required timestamp fields.
            dbquery3 = skCase.executeQuery(artifactSQL2 + resultSet.getString("artifact_id"))
            resultSet3 = dbquery3.getResultSet()
            while resultSet3.next():
                artifactDict[resultSet3.getString("date_type")] = resultSet3.getString("date_value")
                artifactDict["message"] = resultSet2.getString("e1")
                artifactDict["timestamp_desc"] = resultSet3.getString("date_type")
                artifactDict["timestamp"] = resultSet3.getString("date_value")
            # Remaining attributes become plain key/value pairs.
            dbquery4 = skCase.executeQuery(artifactSQL3 + resultSet.getString("artifact_id"))
            resultSet4 = dbquery4.getResultSet()
            while resultSet4.next():
                artValue = resultSet4.getString("value")
                artName = resultSet4.getString("name")
                if isinstance(artValue, unicode):
                    # Strip em-dash characters that break the downstream import.
                    #print (artValue)
                    artifactDict[artName] = artValue.translate({0x2014: None})
                else:
                    artifactDict[artName] = artValue
            dbquery4.close()
            dbquery3.close()
        dbquery2.close()
        artList.append(artifactDict)
    dbquery.close()

    # Write the artifact records as JSON Lines (one JSON object per line).
    jsonFileNamePath = os.path.join(tempDir, jsonFileName)
    with open(jsonFileNamePath, 'a') as f:
        for art in artList:
            json.dump(art, f)
            f.write("\n")

    # Pass 2: one JSON record per (file, MAC time) combination, flushed in
    # batches of ~1000 to bound memory use.
    fileList = []
    numFiles = 0
    dbquery = skCase.executeQuery("Select obj_id from tsk_files")
    resultSet = dbquery.getResultSet()
    while resultSet.next():
        dbquery2 = skCase.executeQuery("Select ctime, crtime, atime, mtime, parent_path||name from tsk_files where obj_id = " + \
                                       resultSet.getString("obj_id"))
        resultSet2 = dbquery2.getResultSet()
        meta = resultSet2.getMetaData()
        columnCount = meta.getColumnCount()
        column_names = []  # ?? Do I need this
        while resultSet2.next():
            # Columns 1..4 are the four timestamps; column 5 is the full path.
            for i in range (1,int(columnCount)):
                fileDict = {}
                if resultSet2.getString(i) is None:
                    fileDict[meta.getColumnLabel(i)] = ""
                    fileDict["message"] = "TSK : "
                    fileDict["timestamp"] = 0
                else:
                    fileDict[meta.getColumnLabel(i)] = resultSet2.getString(i)
                    fileDict["message"] = "TSK : " + resultSet2.getString(5)
                    fileDict["timestamp"] = resultSet2.getString(i)
                fileDict["timestamp_desc"] = meta.getColumnLabel(i)
                # Attach every non-timestamp tsk_files column to the record.
                dbquery3 = skCase.executeQuery("Select * from tsk_files where obj_id = " + resultSet.getString("obj_id"))
                resultSet3 = dbquery3.getResultSet()
                meta3 = resultSet3.getMetaData()
                columnCount3 = meta3.getColumnCount()
                while resultSet3.next():
                    for x in range(1,int(columnCount3)):
                        colHead = meta3.getColumnLabel(x)
                        if (('ctime' in colHead) or ('crtime' in colHead) or ('atime' in colHead) or ('mtime' in colHead)):
                            pass  # timestamps already captured above
                        else:
                            if resultSet3.getString(x) is None:
                                fileDict[colHead] = ""
                            else:
                                fileDict[colHead] = resultSet3.getString(x)
                dbquery3.close()
                fileList.append(fileDict)
                numFiles = numFiles + 1
                # Flush a batch to the JSONL file and reset the buffer.
                if numFiles > 1000:
                    with open(jsonFileNamePath, 'a') as f:
                        for file in fileList:
                            json.dump(file, f)
                            f.write("\n")
                    numFiles = 0
                    fileList = []
        dbquery2.close()
    dbquery.close()
    # NOTE(review): any records left in fileList (< 1000) after the loop are
    # never flushed to the JSONL file — confirm whether that tail is intended
    # to be dropped.

    # Status codes emitted by the submitter executable:
    # TS001 - Invalid arguments
    # TS002 - Sketch Created
    # TS003 - Sketch Already Exists
    # TS004 - Error Looking up Sketch
    # TS005 - Timeline Added
    # TS006 - Timeline Not Created
    # Try to run this 3 times in case a sketch is added but the timeline add
    # fails; a later attempt may still add the timeline, so the user does not
    # have to rerun the module.
    emessage = "Internal Error contact plugin maker"
    for z in range(3):
        self.log(Level.INFO, "command ==> " + self.path_to_Timesketch_exe + " " + sketchName + " " + jsonFileNamePath + " " + self.IP_Address + \
                 " " + self.Port_Number + " " + self.userName + " " + self.password)
        pipe = Popen([self.path_to_Timesketch_exe, sketchName, jsonFileNamePath, self.IP_Address, self.Port_Number, self.userName, self.password], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)
        # Translate the submitter's status codes into a user-facing message.
        if "TS005" in out_text:
            if "TS002" in out_text:
                emessage = "Sketch added, Timeline added"
                break
            elif "TS003" in out_text:
                emessage = "Sketch already exists, Timeline added"
                break
        elif "TS001" in out_text:
            emessage = "invalid parameters passed in, missing parameters"
            break
        elif "TS006" in out_text:
            if "TSK004" in out_text:
                emessage = "Error Looking up sketch, Timeline Not Created"

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Timesketch File Submit", emessage )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Create/mount a preview VHD and copy selected file types into it.

    Windows-only: uses diskpart.exe scripts (built by self.Create_Diskpart_Script)
    to create or mount a VHD sized from the data source, reads the wanted
    extensions from the File_Extensions_To_Export table in
    self.file_extension_db, extracts every matching file onto the mounted
    drive, writes CSV logs of the counts, then unmounts the VHD. Returns
    IngestModule.ProcessResult.OK.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the status of the progress bar.
    progressBar.progress("Creating/Mounting the Virtual Disk")

    skCase = Case.getCurrentCase().getSleuthkitCase();

    # Create the directory to write the vhd file to.
    mod_dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    vdisk_dir = os.path.join(mod_dir, "Preview_VHD")
    try:
        os.mkdir(vdisk_dir)
    except:
        self.log(Level.INFO, "Virtual disk directory already exists in Module Directory")
    vdisk_name = os.path.join(vdisk_dir, Case.getCurrentCase().getNumber() + "_preview.vhd")

    # Get the size of the image file in megs (1048576 = bytes per MiB).
    size_of_disk = dataSource.getSize() // 1048576
    self.log(Level.INFO, "size of disk is ==> " + str(size_of_disk))
    # Build the three diskpart scripts (create/unmount/mount) and learn which
    # drive letter the mounted VHD will get.
    (vdisk_create_script, vdisk_unmount_script, vdisk_mount_script, drive_letter) = self.Create_Diskpart_Script(size_of_disk, vdisk_name)

    # Run Diskpart using the scripts that will create a VHD.
    # If the disk already exists then just mount it; otherwise create it,
    # mount it and format it.
    if os.path.exists(vdisk_name):
        self.log(Level.INFO, "Running prog ==> " + "diskpart.exe " + " -S " + vdisk_mount_script)
        pipe = Popen(["diskpart.exe", "-S", vdisk_mount_script], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)
    else:
        self.log(Level.INFO, "Running prog ==> " + "diskpart.exe " + " -S " + vdisk_create_script)
        pipe = Popen(["diskpart.exe", "-S", vdisk_create_script], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Make the top level directory on the mounted drive the data source name.
    try:
        data_source_dir = os.path.join(drive_letter + "\\", dataSource.getName())
        os.mkdir(data_source_dir)
    except:
        self.log(Level.INFO, "Data source Directory already exists")

    # Create CSV log files (one in the module dir, one on the VHD) recording
    # how many files of each extension are written.
    try:
        mod_log_file = os.path.join(vdisk_dir, "File_Extensions_Written_Log_" + dataSource.getName() + ".csv")
        self.log(Level.INFO, "Output Directory is ==> " + mod_log_file)
        mod_log = open(mod_log_file, "w")
        mod_log.write('Directory_In,File_Extension,Number_Of_Files_Written \n')
        out_log_file = os.path.join(drive_letter + "\\", "File_Extensions_Written_Log_" + dataSource.getName() + ".csv")
        self.log(Level.INFO, "Output Directory is ==> " + out_log_file)
        out_log = open(out_log_file, "w")
        out_log.write('Directory_In,File_Extension,Number_Of_Files_Written \n')
    except:
        self.log(Level.INFO, "Log File creation error")

    # Open the extension-configuration DB using JDBC.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % self.file_extension_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open File Extension database " + self.file_extension_db + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Get all the file extensions that we want to find and export to the Preview Disk.
    try:
        stmt = dbConn.createStatement()
        SQL_Statement = "select Output_Directory, File_Extension from File_Extensions_To_Export"
        self.log(Level.INFO, "SQL Statement --> " + SQL_Statement)
        resultSet = stmt.executeQuery(SQL_Statement)
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for File_Extensions_To_Export table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Cycle through each configured extension and extract the matching files.
    while resultSet.next():
        try:
            # Update the progress bar with the type of Document we are extracting.
            progressBar.progress("Extracting " + resultSet.getString('Output_Directory') + " Files")
            fileManager = Case.getCurrentCase().getServices().getFileManager()
            files = fileManager.findFiles(dataSource, "%." + resultSet.getString("File_Extension"), "")
            numFiles = len(files)
            self.log(Level.INFO, "Number of files found for file extension " + resultSet.getString("File_Extension") + " ==> " + str(numFiles))
            # Record the count in both CSV logs.
            try:
                mod_log.write(resultSet.getString('Output_Directory') + "," + resultSet.getString("File_Extension") + "," + str(numFiles) + "\n")
                out_log.write(resultSet.getString('Output_Directory') + "," + resultSet.getString("File_Extension") + "," + str(numFiles) + "\n")
            except:
                self.log(Level.INFO, " Error Writing Log File ==> " + resultSet.getString('Output_Directory') + "," + resultSet.getString("File_Extension") + "," + str(numFiles) + "\n")
            # Try and create the per-extension output directory; it may already
            # exist, which we ignore.
            try:
                dir_to_write_to = os.path.join(data_source_dir, resultSet.getString('Output_Directory'))
                if not os.path.exists(dir_to_write_to):
                    os.mkdir(dir_to_write_to)
            except:
                self.log(Level.INFO, "Directory " + resultSet.getString('Output_Directory') + " already exists.")
            # Write all the matching files to the VHD, prefixing each name with
            # its object id to avoid collisions.
            for file in files:
                lclfile = os.path.join(dir_to_write_to, str(file.getId()) + "-" + file.getName())
                ContentUtils.writeToFile(file, File(lclfile))
        except:
            self.log(Level.INFO, "Error in processing sql statement")

    # Close the log files.
    try:
        mod_log.close()
        out_log.close()
    except:
        self.log(Level.INFO, "Error closing log files, they might not exist")

    # Set the progress bar to unmounting.
    progressBar.progress("Unmounting The Virtual Disk")

    # Run Diskpart using the script to unmount the VHD.
    self.log(Level.INFO, "Running prog ==> " + "diskpart.exe " + " -S " + vdisk_unmount_script)
    pipe = Popen(["diskpart.exe", "-S", vdisk_unmount_script], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Clean up the JDBC resources.
    stmt.close()
    dbConn.close()

    # Clean up the diskpart script directory.
    try:
        shutil.rmtree(os.path.join(Case.getCurrentCase().getTempDirectory(), "vdisk_scripts"))
    except:
        # NOTE(review): Temp_Dir is not defined in this method, so this log
        # call would itself raise NameError if the rmtree fails — confirm.
        self.log(Level.INFO, "removal of vdisk script directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "VDiskCreate", " VDiskCreate Files Have Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Find every 'contacts.db' in the data source, read name/email/phone
    rows from its 'contacts' table, and post them as TSK_CONTACT artifacts.

    Args:
        dataSource: the Content object for the data source being ingested.
        progressBar: DataSourceIngestModuleProgress used to report progress.

    Returns:
        IngestModule.ProcessResult.OK in all cases; errors are logged to the
        ingest inbox / module log rather than raised.
    """
    # We don't know how much work there is until the file search completes.
    progressBar.switchToIndeterminate()

    # This will work in 4.0.1 and beyond
    # Use blackboard class to index blackboard artifacts for keyword search
    # blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find files named contacts.db, regardless of parent path.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "contacts.db")

    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:

        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder; use the file id as the
        # name to reduce collisions between identically named files.
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".db")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Open the extracted copy using JDBC.
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("SELECT * FROM contacts")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for contacts table (" + e.getMessage() + ")")
            # BUGFIX: close the connection before bailing out; the original
            # returned here and leaked the connection, which also kept the
            # temp database file locked.
            dbConn.close()
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts.
        while resultSet.next():
            try:
                name = resultSet.getString("name")
                email = resultSet.getString("email")
                phone = resultSet.getString("phone")
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
                # BUGFIX: skip this row; the original fell through and built
                # an artifact from unbound (first row) or stale (later rows)
                # name/email/phone values.
                continue

            # Make an artifact on the blackboard, TSK_CONTACT, and give it
            # attributes for each of the fields.
            art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(),
                                                 ContactsDbIngestModuleFactory.moduleName, name))
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(),
                                                 ContactsDbIngestModuleFactory.moduleName, email))
            art.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(),
                                                 ContactsDbIngestModuleFactory.moduleName, phone))

            # This will work in 4.0.1 and beyond
            # try:
            #     # index the artifact for keyword search
            #     blackboard.indexArtifact(art)
            # except Blackboard.BlackboardException as e:
            #     self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there are new artifacts.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ContactsDbIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, None))

        # Clean up the JDBC resources and the extracted temp copy.
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

    # After all databases, post a message to the ingest messages inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "ContactsDb Analyzer", "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def parse_recentApps(self, dataSource, progressBar):
    """Parse WMI CCM "Recently Used Apps" data out of the WBEM repository.

    Extracts every file under /Windows/System32/wbem/Repository/ to a
    Recently_Used temp directory, runs the external recentApps parser
    (self.path_to_recentApps_exe) to produce recentlyUsedApps.db3, then reads
    that SQLite database over JDBC and posts each row as a custom
    TSK_CCM_RECENTLY_USED_APPS artifact. All temp files are removed at the end.

    dataSource  -- the Content object for the data source being ingested
    progressBar -- DataSourceIngestModuleProgress for progress reporting
    Returns IngestModule.ProcessResult.OK on cancel/DB errors; otherwise falls
    off the end (implicitly returns None) after cleanup.

    NOTE(review): this method was recovered from whitespace-mangled source;
    the statement text is unchanged but block nesting was reconstructed from
    control-flow keywords — confirm indentation against the original module.
    """
    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Set the database to be read to the one created by the parser program.
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # Grab everything in the WBEM repository directory (OBJECTS.DATA etc.).
    files = fileManager.findFiles(dataSource, "%", "/Windows/System32/wbem/Repository/")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create the Recently_Used directory in the case temp directory; if it
    # already exists just continue on processing (mkdir raises, we log).
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir + "\Recently_Used")
    except:
        self.log(Level.INFO, "Recently Used Directory already exists " + Temp_Dir)

    # Write out each repository file to the temp directory.
    for file in files:

        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        if (file.getName() == '.' or file.getName() == '..'):
            self.log(Level.INFO, "Parent or Root Directory File not writing")
        else:
            # Save the file locally in the temp folder, keeping its name.
            lclDbPath = os.path.join(Temp_Dir + "\Recently_Used", file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the external parser over the extracted repository; it writes its
    # results to recentlyUsedApps.db3 in the same directory.
    self.log(Level.INFO, "Running prog ==> " + self.path_to_recentApps_exe + " win7 " + Temp_Dir + "\Recently_Used " + " " + \
                         Temp_Dir + "\Recently_Used\\recentlyUsedApps.db3")
    pipe = Popen([self.path_to_recentApps_exe, "win7", Temp_Dir + "\Recently_Used", Temp_Dir + "\Recently_Used\\recentlyUsedApps.db3"], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory() + "\Recently_Used", "recentlyUsedApps.db3")

    # "Exiting" in the parser's stdout signals a parser failure.
    if ("Exiting" in out_text):
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "CCM Recently Used Apps", " Error in CCM Recently Used Apps module " )
        IngestServices.getInstance().postMessage(message)
    else:
        # Add the custom artifact type to the blackboard; addArtifactType
        # raises if the type already exists, which we treat as benign.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts ==> TSK_CCM_RECENTLY_USED_APPS")
            artID_art = skCase.addArtifactType("TSK_CCM_RECENTLY_USED_APPS", "WMI Recently Used Apps")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, artifact TSK_CCM_RECENTLY_USED_APPS exists. ==> ")

        # Register each custom attribute type; each addArtifactAttributeType
        # raises when the attribute already exists, which is also benign.
        # (attID_efn is deliberately reused; types are looked up by name later.)
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_EXPLORER_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Explorer File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Explorer File Name ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_FILE_SIZE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Size")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Size ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_LAST_USED_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Last Used Time")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Last Used Time ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_TIME_ZONE_OFFSET", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Time Zone Offset")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Time Zone Offset ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_LAUNCH_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Launch Count")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Launch Count ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_ORIG_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Original File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Original File Name ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_FILE_DESC", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Description")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Description ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_PROD_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Product Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Product Name ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_PROD_VERSION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Product Version")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Product Version ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_FILE_VERSION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Version")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Version ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_ADDITIONAL_PROD_CODES", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Additional Product Codes")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Additional Product Codes ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_MSI_VERSION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "MSI Version")
        except:
            self.log(Level.INFO, "Attributes Creation Error, MSI Version ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_MSI_DISPLAY_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "MSI Display Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, MSI Display Name ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_PRODUCT_CODE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Product Code")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Product Code ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_SOFTWARE_PROP_HASH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Software Property Hash")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Software Property Hash ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_PROD_LANG", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Product Language")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Product Language ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_FILE_PROP_HASH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Property Hash")
        except:
            self.log(Level.INFO, "Attributes Creation Error, File Property Hash ==> ")
        try:
            attID_efn = skCase.addArtifactAttributeType("TSK_MSI_PUBLISHER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "MSI Publisher")
        except:
            self.log(Level.INFO, "Attributes Creation Error, MSI Publisher ==> ")

        # Artifacts are attached to the OBJECTS.DATA source file only; the
        # parsed rows come from the SQLite DB the external parser produced.
        for file in files:
            if (file.getName() == "OBJECTS.DATA"):

                # Open the parser-produced DB using JDBC.
                lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory() + "\Recently_Used", "recentlyUsedApps.db3")
                self.log(Level.INFO, "Path the recentlyUsedApps.db3 database file created ==> " + lclDbPath)
                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) recentlyUsedApps.db3 (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Query the recently_used table; each column is aliased to the
                # matching TSK_* attribute name so attributes can be looked up
                # directly from the result-set column labels below.
                try:
                    stmt = dbConn.createStatement()
                    recently_used_sql = "select FolderPath 'TSK_PATH', ExplorerFileName 'TSK_EXPLORER_FILE_NAME', " + \
                                        "FileSize 'TSK_FILE_SIZE', LastUserName 'TSK_USER_ID', strftime('%s',LastUsedTime) " + \
                                        "'TSK_LAST_USED_TIME', TimeZoneOffset 'TSK_TIME_ZONE_OFFSET', LaunchCount " + \
                                        "'TSK_LAUNCH_COUNT', OriginalFileName 'TSK_ORIG_FILE_NAME', FileDescription " + \
                                        "'TSK_FILE_DESC', CompanyName 'TSK_ORGANIZATION', ProductName 'TSK_PROD_NAME', " + \
                                        "ProductVersion 'TSK_PROD_VERSION', FileVersion 'TSK_FILE_VERSION', " + \
                                        "AdditionalProductCodes 'TSK_ADDITIONAL_PROD_CODES', msiVersion " + \
                                        "'TSK_MSI_VERSION', msiDisplayName 'TSK_MSI_DISPLAY_NAME', " + \
                                        "ProductCode 'TSK_PRODUCT_CODE', SoftwarePropertiesHash " + \
                                        "'TSK_SOFTWARE_PROP_HASH', ProductLanguage 'TSK_PROD_LANG', " + \
                                        "FilePropertiesHash 'TSK_FILE_PROP_HASH', msiPublisher 'TSK_MSI_PUBLISHER' " + \
                                        "from recently_used;"
                    self.log(Level.INFO, recently_used_sql)
                    resultSet = stmt.executeQuery(recently_used_sql)
                    self.log(Level.INFO, "query recently_used table")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for recently_used table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Resolve the custom artifact type created (or pre-existing) above.
                artID_hst = skCase.getArtifactTypeID("TSK_CCM_RECENTLY_USED_APPS")
                artID_hst_evt = skCase.getArtifactType("TSK_CCM_RECENTLY_USED_APPS")

                # Collect column labels; they double as attribute type names.
                meta = resultSet.getMetaData()
                columncount = meta.getColumnCount()
                column_names = []
                self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
                for x in range (1, columncount + 1):
                    self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                    column_names.append(meta.getColumnLabel(x))

                self.log(Level.INFO, "All Columns ==> " + str(column_names))

                # Cycle through each row and create artifacts.
                while resultSet.next():
                    try:
                        #self.log(Level.INFO, SQL_String_1)
                        self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                        art = file.newArtifact(artID_hst)
                        self.log(Level.INFO, "Inserting attribute URL")
                        # Add one attribute per column, dispatching on the
                        # attribute's declared value type for the getter/ctor.
                        for col_name in column_names:
                            attID_ex1 = skCase.getAttributeType(col_name)
                            self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                            self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                            if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, ParseRecentlyUsedAppsIngestModuleFactory.moduleName, resultSet.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, ParseRecentlyUsedAppsIngestModuleFactory.moduleName, resultSet.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                                try:
                                    # NOTE(review): uses getInt for LONG values — works for
                                    # small values; confirm no truncation is possible here.
                                    art.addAttribute(BlackboardAttribute(attID_ex1, ParseRecentlyUsedAppsIngestModuleFactory.moduleName, resultSet.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                                try:
                                    # NOTE(review): uses getInt for DOUBLE values — confirm intended.
                                    art.addAttribute(BlackboardAttribute(attID_ex1, ParseRecentlyUsedAppsIngestModuleFactory.moduleName, resultSet.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, ParseRecentlyUsedAppsIngestModuleFactory.moduleName, resultSet.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                            else:
                                # Fallback (DATETIME): attribute takes an integer epoch value.
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, ParseRecentlyUsedAppsIngestModuleFactory.moduleName, int(resultSet.getString(col_name))))
                                except:
                                    self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from web_history table (" + e.getMessage() + ")")

                # Notify the UI and others that there are new artifacts.
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(ParseRecentlyUsedAppsIngestModuleFactory.moduleName, artID_hst_evt, None))

                stmt.close()
                dbConn.close()

    # Clean up: remove the parser-produced database...
    try:
        os.remove(lclDbPath)
    except:
        self.log(Level.INFO, "removal of Recently Used database failed ")

    # ...the extracted repository files...
    for file in files:
        try:
            os.remove(Temp_Dir + "\\Recently_Used" + "\\" + file.getName())
        except:
            self.log(Level.INFO, "removal of Recently Used files failed " + Temp_Dir + "\\" + file.getName())

    # ...and finally the Recently_Used directory itself.
    try:
        os.rmdir(Temp_Dir + "\Recently_Used")
    except:
        self.log(Level.INFO, "removal of recently used directory failed " + Temp_Dir)
def process(self, dataSource, progressBar): # we don't know how much work there is yet progressBar.switchToIndeterminate() self.log(Level.INFO,dataSource.getUniquePath()) # Use blackboard class to index blackboard artifacts for keyword search blackboard = Case.getCurrentCase().getServices().getBlackboard() self.art_contacts = self.create_artifact_type("Labcif-MSTeams_CONTACTS_"," Contacts", blackboard) self.art_messages = self.create_artifact_type("Labcif-MSTeams_MESSAGES_"," MESSAGES", blackboard) self.art_messages_reacts = self.create_artifact_type("Labcif-MSTeams_MESSAGES_REACTS"," REACTS", blackboard) self.art_messages_files = self.create_artifact_type("Labcif-MSTeams_MESSAGES_FILES"," FILES", blackboard) self.art_call = self.create_artifact_type("Labcif-MSTeams_CALLS_", " Call history", blackboard) self.art_call_one_to_one = self.create_artifact_type("Labcif-MSTeams_CALLS_ONE_TO_ONE", " Call history one to one", blackboard) self.art_teams = self.create_artifact_type("Labcif-MSTeams_TEAMS_"," Teams", blackboard) # contactos self.att_name = self.create_attribute_type('Labcif-MSTeams_CONTACT_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Name", blackboard) self.att_email = self.create_attribute_type('Labcif-MSTeams_CONTACT_EMAIL', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Email", blackboard) self.att_orgid = self.create_attribute_type('Labcif-MSTeams_CONTACT_ORGID', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Orgid", blackboard) self.att_user_contacts = self.create_attribute_type('Labcif-MSTeams_USERNAME_CONTACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) self.att_folder_extract_contacts = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_CONTACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # reacts self.att_message_id_reacts = 
self.create_attribute_type('Labcif-MSTeams_MESSAGE_ID_REACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Message ID", blackboard) self.att_sender_name_react = self.create_attribute_type('Labcif-MSTeams_MESSAGE_SENDER_NAME_REACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Who reacted", blackboard) self.att_reacted_with = self.create_attribute_type('Labcif-MSTeams_MESSAGE_FILE_LOCAL_EMOJI_REACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Emoji", blackboard) self.att_react_time= self.create_attribute_type('Labcif-MSTeams_MESSAGE_REACT_TIME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "React time", blackboard) self.att_user_message_reacts = self.create_attribute_type('Labcif-MSTeams_USERNAME_MESSAGE_REACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) self.att_folder_extract_reacts = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_REACTS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # mensagens self.att_message_id = self.create_attribute_type('Labcif-MSTeams_MESSAGE_ID', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Message ID", blackboard) self.att_message = self.create_attribute_type('Labcif-MSTeams_MESSAGE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Message", blackboard) self.att_sender_name = self.create_attribute_type('Labcif-MSTeams_SENDER', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Senders name", blackboard) self.att_time = self.create_attribute_type('Labcif-MSTeams_TIME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Message time", blackboard) self.att_cvid = self.create_attribute_type('Labcif-MSTeams_CONVERSATION_ID', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "CV", blackboard) self.att_user_message = self.create_attribute_type('Labcif-MSTeams_USERNAME_MESSAGE', 
BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) self.att_folder_extract_message = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_MESSAGES', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # ficheiros self.att_message_id_files = self.create_attribute_type('Labcif-MSTeams_MESSAGE_ID', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Message ID", blackboard) self.att_file_name = self.create_attribute_type('Labcif-MSTeams_MESSAGE_FILE_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File name", blackboard) self.att_file_local = self.create_attribute_type('Labcif-MSTeams_MESSAGE_FILE_LINK', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Link", blackboard) self.att_user_message_files = self.create_attribute_type('Labcif-MSTeams_USERNAME_MESSAGE_FILES', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) self.att_folder_extract_files = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_FILES', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # calls one to one self.att_date_start_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_TIME_START', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one time start", blackboard) self.att_date_finish_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_TIME_FINISH', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one time finish", blackboard) self.att_creator_name_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_CREATOR_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one Creator Name", blackboard) self.att_creator_email_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_CREATOR_EMAIL', 
BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one Creator Email", blackboard) self.att_participant_name_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_PARTICIPANT_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one Participant Name", blackboard) self.att_participant_email_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_PARTICIPANT_EMAIL', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one Participant Email", blackboard) self.att_state_one_to_one = self.create_attribute_type('Labcif-MSTeams_CALL_ONE_TO_ONE_STATE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call one to one state", blackboard) self.att_user_calls_one_to_one = self.create_attribute_type('Labcif-MSTeams_USERNAME_CALLS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) self.att_folder_extract_calls_one_to_one = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_CALLS_ONE_TO_ONE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # teams self.att_cv_id_teams = self.create_attribute_type('Labcif-MSTeams_CV_ID_TEAMS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Conversation ID teams", blackboard) self.att_creator_name_teams = self.create_attribute_type('Labcif-MSTeams_TEAMS_CREATOR_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Teams Creator Name", blackboard) self.att_creator_email_teams = self.create_attribute_type('Labcif-MSTeams_TEAMS_CREATOR_EMAIL', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Teams Creator Email", blackboard) self.att_participant_name_teams = self.create_attribute_type('Labcif-MSTeams_TEAMS_PARTICIPANT_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Teams Participant Name", blackboard) self.att_participant_email_teams = 
self.create_attribute_type('Labcif-MSTeams_teams_PARTICIPANT_EMAIL_ONE_TO_ONE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Teams Participant Email", blackboard) self.att_user_teams = self.create_attribute_type('Labcif-MSTeams_USERNAME_TEAMS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) self.att_folder_extract_teams = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_TEAMS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # calls self.att_date = self.create_attribute_type('Labcif-MSTeams_CALL_DATE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call Date", blackboard) self.att_creator_name = self.create_attribute_type('Labcif-MSTeams_CALL_CREATOR_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Creator Name", blackboard) self.att_creator_email = self.create_attribute_type('Labcif-MSTeams_CALL_CREATOR_EMAIL', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Creator Email", blackboard) self.att_count_people_in = self.create_attribute_type('Labcif-MSTeams_CALL_AMOUNT_PEOPLE_IN', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Amount of people in call", blackboard) self.att_duration = self.create_attribute_type('Labcif-MSTeams_CALL_DURANTION', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Call Duration", blackboard) self.att_participant_name = self.create_attribute_type('Labcif-MSTeams_CALL_PARTICIPANT_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Participant Name", blackboard) self.att_participant_email = self.create_attribute_type('Labcif-MSTeams_CALL_PARTICIPANT_EMAIL', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Participant Email", blackboard) self.att_user_calls = self.create_attribute_type('Labcif-MSTeams_USERNAME_CALLS', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User", blackboard) 
self.att_folder_extract_calls = self.create_attribute_type('Labcif-MSTeams_FOLDER_EXTRACT_CALL', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Folder of extraction", blackboard) # For our example, we will use FileManager to get all # files with the word "test" # in the name and then count and read them # FileManager API: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%.ldb","https_teams.microsoft.com_") numFiles = len(files) progressBar.switchToDeterminate(numFiles) fileCount = 0 for file in files: # Check if the user pressed cancel while we were busy if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK fileCount += 1 # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of # artfiact. Refer to the developer docs for other examples. src = file.getParentPath() pathSplited=src.split("/") user=pathSplited[2] if user not in users: users.append(user) buffer = jarray.zeros(file.getSize(), "b") file.read(buffer,0,file.getSize()) if "lost" not in src and "Roaming" in file.getParentPath() and "ProjetoEI" not in file.getParentPath(): if src not in paths: tm = datetime.fromtimestamp(math.floor(tim.time())).strftime("%m-%d-%Y_%Hh-%Mm-%Ss") paths[src]="Analysis_Autopsy_LDB_{}_{}".format(user,tm) if not os.path.exists(os.path.join(projectEIAppDataPath,paths[src])): try: os.mkdir(os.path.join(projectEIAppDataPath,paths[src])) except OSError: print("Creation of the directory %s failed" % os.path.join(projectEIAppDataPath,paths[src])) else: print("Successfully created the directory %s " % os.path.join(projectEIAppDataPath,paths[src])) f = open(os.path.join(os.path.join(projectEIAppDataPath,paths[src]),file.getName()),"wb") f.write(buffer.tostring()) f.close() # try: # # index the artifact for keyword search # 
blackboard.indexArtifact(art) # except Blackboard.BlackboardException as e: # self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName()+str(e)) # To further the example, this code will read the contents of the file and count the number of bytes # Update the progress bar progressBar.progress(fileCount) for src, path in paths.items(): complementaryFiles=fileManager.findFilesByParentPath(dataSource.getId(),src) for file in complementaryFiles: if "lost" not in file.getParentPath() and ".ldb" not in file.getName() and "lost" not in file.getName() and "Roaming" in file.getParentPath() and "ProjetoEI" not in file.getParentPath(): if file.getName() == "." or file.getName() == ".." or "-slack" in file.getName(): continue buffer = jarray.zeros(file.getSize(), "b") if src not in paths: tm = datetime.fromtimestamp(math.floor(tim.time())).strftime("%m-%d-%Y_%Hh-%Mm-%Ss") paths[src] = "Analysis_Autopsy_LDB_{}_{}".format(user,tm) if not os.path.exists(os.path.join(projectEIAppDataPath,paths[src])): try: os.mkdir(os.path.join(projectEIAppDataPath,paths[src])) except OSError: print("Creation of the directory %s failed" % os.path.join(projectEIAppDataPath,paths[src])) else: print("Successfully created the directory %s " % os.path.join(projectEIAppDataPath,paths[src])) try: f = open(os.path.join(os.path.join(projectEIAppDataPath,paths[src]),file.getName()),"a") file.read(buffer,0,file.getSize()) f.write(buffer.tostring()) f.close() except : self.log(Level.INFO,"File Crash") pathModule = os.path.realpath(__file__) indexCutPath=pathModule.rfind("\\") pathModule=pathModule[0:indexCutPath+1] # message = IngestMessage.createMessage( # IngestMessage.MessageType.DATA, Labcif-MSTeamsFactory.moduleName, # str(self.filesFound) + " files found") analysisPath = "" result = {} for key,value in paths.items(): if key not in result: result[key] = value for key, value in result.items(): p = subprocess.Popen([r"{}EI\EI.exe".format(pathModule),"--pathToEI",r"{}EI\ ".format(pathModule), 
"-a", value],stderr=subprocess.PIPE) out = p.stderr.read() self.log(Level.INFO, out) p.wait() # os.system("cmd /c \"{}EI\\EI.exe\" --pathToEI \"{}EI\\\" -a {}".format(pathModule,pathModule,value)) results=[] pathResults="Analise Autopsy" for u in users: pathLDB="" for key,value in paths.items(): if "Analysis_Autopsy_LDB_{}".format(u) in value: pathLDB=value break for root, dirs, files in os.walk(projectEIAppDataPath, topdown=False): for name in dirs: if pathResults in name and os.stat(os.path.join(projectEIAppDataPath,pathLDB)).st_mtime < os.stat(os.path.join(projectEIAppDataPath,name)).st_mtime: pathsLDB[pathLDB]=os.path.join(projectEIAppDataPath,name) results.append(os.path.join(projectEIAppDataPath,name)) f = open(os.path.join(projectEIAppDataPath,"filesToReport.txt"),"w") for r in results: for files in os.walk(r,topdown=False): for name in files: for fileName in name: if ".csv" in fileName or ".html" in fileName or ".css" in fileName: f.write(os.path.join(r,fileName)+"\n") f.close() f = open(os.path.join(projectEIAppDataPath,"filesToReport.txt"), "r") for line in f: line = line.replace("\n","") pathExtract="" if ".csv" in line: # ok if "EventCall" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break with io.open(line,encoding="utf-8") as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: row = row[0].split(";") if rowcount!=0: art = dataSource.newArtifact(self.art_call.getTypeID()) dura=str(int(float(row[4]))) art.addAttribute(BlackboardAttribute(self.att_date, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[0]))) art.addAttribute(BlackboardAttribute(self.att_creator_name, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[1]))) art.addAttribute(BlackboardAttribute(self.att_creator_email, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[2]))) 
art.addAttribute(BlackboardAttribute(self.att_count_people_in, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[3]))) art.addAttribute(BlackboardAttribute(self.att_duration, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName,dura )) art.addAttribute(BlackboardAttribute(self.att_participant_name, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[5]))) art.addAttribute(BlackboardAttribute(self.att_participant_email, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[6]))) art.addAttribute(BlackboardAttribute(self.att_user_calls, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[7]))) art.addAttribute(BlackboardAttribute(self.att_folder_extract_calls, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: self.log(Level.INFO,"File empty") rowcount+=1 csvfile.close() # ok elif "Conversations" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break with io.open(line,encoding="utf-8") as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: row = row[0].split(";") if rowcount!=0: art = dataSource.newArtifact(self.art_teams.getTypeID()) art.addAttribute(BlackboardAttribute(self.att_cv_id_teams, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[0]))) art.addAttribute(BlackboardAttribute(self.att_creator_name_teams, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[1]))) art.addAttribute(BlackboardAttribute(self.att_creator_email_teams, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[2]))) art.addAttribute(BlackboardAttribute(self.att_participant_name_teams, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[3]))) art.addAttribute(BlackboardAttribute(self.att_participant_email_teams, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[4]))) art.addAttribute(BlackboardAttribute(self.att_user_teams, 
LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[5]))) art.addAttribute(BlackboardAttribute(self.att_folder_extract_teams, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: self.log(Level.INFO,"File empty") rowcount+=1 csvfile.close() # ok elif "CallOneToOne" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break with io.open(line,encoding="utf-8") as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: row = row[0].split(";") if rowcount!=0: art = dataSource.newArtifact(self.art_call_one_to_one.getTypeID()) art.addAttribute(BlackboardAttribute(self.att_date_start_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[2]))) art.addAttribute(BlackboardAttribute(self.att_date_finish_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[3]))) art.addAttribute(BlackboardAttribute(self.att_creator_name_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[0]))) art.addAttribute(BlackboardAttribute(self.att_creator_email_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[1]))) art.addAttribute(BlackboardAttribute(self.att_participant_name_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[4]))) art.addAttribute(BlackboardAttribute(self.att_participant_email_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[5]))) art.addAttribute(BlackboardAttribute(self.att_state_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[6]))) art.addAttribute(BlackboardAttribute(self.att_user_calls_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[7]))) art.addAttribute(BlackboardAttribute(self.att_folder_extract_calls_one_to_one, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: self.log(Level.INFO,"File empty") 
rowcount+=1 csvfile.close() elif "Files" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break with io.open(line,encoding="utf-8") as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: row = row[0].split(";") if rowcount!=0: art = dataSource.newArtifact(self.art_messages_files.getTypeID()) art.addAttribute(BlackboardAttribute(self.att_message_id_files, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[0]))) art.addAttribute(BlackboardAttribute(self.att_file_name, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[1]))) art.addAttribute(BlackboardAttribute(self.att_file_local, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[2]))) art.addAttribute(BlackboardAttribute(self.att_user_message_files, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[3]))) art.addAttribute(BlackboardAttribute(self.att_folder_extract_files, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: self.log(Level.INFO,"File empty") rowcount+=1 csvfile.close() elif "Mensagens" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break idMessage="" message="" sender="" timee="" cvid="" userMessage="" with open(line) as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: self.log(Level.INFO,str(row)) if rowcount!=0: if len(row) == 1: row = row[0].split(";") idMessage=str(row[0]) message=str(row[1]) timee=str(row[2]) sender=str(row[3]) cvid=str(row[4]) userMessage=str(row[5]) else: partOne = row[0].split(";") idMessage=str(partOne[0]) lastPart=row[len(row)-1].split(";") timee=str(lastPart[1]) sender=str(lastPart[2]) cvid=str(lastPart[3]) userMessage=str(lastPart[4]) message=str(partOne[1])+"," if len(row)!=2: for x in range(1,len(row)-1): 
message+=str(row[x])+"," message+=str(lastPart[0]) art = dataSource.newArtifact(self.art_messages.getTypeID()) art.addAttribute(BlackboardAttribute(self.att_message_id, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, idMessage)) art.addAttribute(BlackboardAttribute(self.att_message, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, message)) art.addAttribute(BlackboardAttribute(self.att_sender_name, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, sender)) art.addAttribute(BlackboardAttribute(self.att_time, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, timee)) art.addAttribute(BlackboardAttribute(self.att_cvid, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, cvid)) art.addAttribute(BlackboardAttribute(self.att_user_message, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, userMessage)) art.addAttribute(BlackboardAttribute(self.att_folder_extract_message, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: self.log(Level.INFO,"File empty") rowcount+=1 csvfile.close() elif "Reacts" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break with io.open(line,encoding="utf-8") as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: row = row[0].split(";") if rowcount!=0: art = dataSource.newArtifact(self.art_messages_reacts.getTypeID()) try: art.addAttribute(BlackboardAttribute(self.att_message_id_reacts, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[0]))) art.addAttribute(BlackboardAttribute(self.att_reacted_with, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[1]))) art.addAttribute(BlackboardAttribute(self.att_sender_name_react, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[2]))) art.addAttribute(BlackboardAttribute(self.att_react_time, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[3]))) 
art.addAttribute(BlackboardAttribute(self.att_user_message_reacts, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[4]))) art.addAttribute(BlackboardAttribute(self.att_folder_extract_reacts, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: pass else: pass except: self.log(Level.INFO,"File empty") rowcount+=1 csvfile.close() elif "Contactos.csv" in line: rowcount=0 for key,value in pathsLDB.items(): if value in line: for k,v in paths.items(): if v == key: pathExtract=k break with io.open(line,encoding="utf-8") as csvfile: reader = csv.reader(x.replace('\0', '') for x in csvfile) for row in reader: # each row is a list try: row = row[0].split(";") if rowcount!=0: art = dataSource.newArtifact(self.art_contacts.getTypeID()) art.addAttribute(BlackboardAttribute(self.att_name, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[0]))) art.addAttribute(BlackboardAttribute(self.att_email, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[1]))) art.addAttribute(BlackboardAttribute(self.att_orgid, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[2]))) art.addAttribute(BlackboardAttribute(self.att_user_contacts, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, str(row[3]))) art.addAttribute(BlackboardAttribute(self.att_folder_extract_contacts, LabcifMSTeamsDataSourceIngestModuleFactory.moduleName, pathExtract)) except: self.log(Level.INFO,"File empty") rowcount+=1 csvfile.close() rowcount=0 #Post a message to the ingest messages in box. message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Sample Jython Data Source Ingest Module", "Please run MSTeams Report") IngestServices.getInstance().postMessage(message) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point.

    Flags every file whose name contains "test" as a generic
    TSK_INTERESTING_FILE_HIT artifact, indexes each artifact for keyword
    search, and reads each file's content to total its byte count.
    Always returns IngestModule.ProcessResult.OK, including on an early
    exit when the user cancels the ingest job.
    """
    # Work size is unknown until the file search completes.
    progressBar.switchToIndeterminate()

    services = Case.getCurrentCase().getServices()
    # Blackboard is used to index the artifacts we post for keyword search.
    blackboard = services.getBlackboard()

    # FileManager API:
    # http://sleuthkit.org/autopsy/docs/api-docs/4.6.0/classorg_1_1sleuthkit_1_1autopsy_1_1casemodule_1_1services_1_1_file_manager.html
    matches = services.getFileManager().findFiles(dataSource, "%test%")
    numFiles = len(matches)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)

    fileCount = 0
    for hit in matches:
        # Honor a pending cancellation before touching the next file.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + hit.getName())
        fileCount += 1

        # Post a generic interesting-file artifact for this match. Refer to
        # the developer docs for other artifact-type examples.
        art = hit.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        art.addAttribute(BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
            SampleJythonDataSourceIngestModuleFactory.moduleName,
            "Test file"))
        try:
            # Index the artifact for keyword search.
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Read the file's content in 1 KiB chunks just to total the bytes.
        stream = ReadContentInputStream(hit)
        chunk = jarray.zeros(1024, "b")
        totLen = 0
        while True:
            got = stream.read(chunk)
            if got == -1:
                break
            totLen += got

        # Update the progress bar after each file.
        progressBar.progress(fileCount)

    # Post a summary message to the ingest inbox.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Sample Jython Data Source Ingest Module",
        "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest: run the bundled Cloudtopsy EXE, which downloads
    CloudTrail logs from the configured S3 bucket and parses them into a
    SQLite database, then import every table of that database into the case
    as a custom "TSK_<TABLE>" artifact type.

    Windows-only (the helper is a Windows EXE). Returns ProcessResult.OK on
    every path; failures are logged and abort further processing.
    """
    # Check if this is Windows
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # In most Autopsy plugins this is where the plugin searches for the files
    # it's going to parse (i.e. a Registry hive or Log file).
    # In this plugin the data being added to the case comes from outside
    # Autopsy, so there isn't really a file to associate the output with,
    # but Autopsy expects the artifacts produced by the plugin to be
    # associated with a file. So this plugin just selects the very first
    # file in the dataset and associates all artifacts with that.
    files = fileManager.findFiles(dataSource, "%")
    self.log(
        Level.INFO,
        "CloudTrail logs will be associated with " + files[0].getName())

    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "Found temporary directory: " + Temp_Dir)
    # Output database the helper EXE writes into.
    mydb = Temp_Dir + "\\Cloudtopsy.db"

    # Parse some keys: run the helper with the configured AWS credentials,
    # region and bucket; it blocks (communicate) until done.
    self.log(
        Level.INFO, "Downloading and parsing CloudTrail logs: \"" +
        self.my_exe + "\" -a \"" + self.Access_Key + "\" -s \"" +
        self.Secret_Key + "\" -r \"" + self.Region + "\" -b \"" +
        self.Bucket + "\" -d \"" + mydb + "\"")
    subprocess.Popen([
        self.my_exe, '-a', self.Access_Key, '-s', self.Secret_Key, '-r',
        self.Region, '-b', self.Bucket, '-d', mydb
    ]).communicate()[0]

    # Open the produced database through the SQLite JDBC driver.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % mydb)
    except SQLException as e:
        self.log(
            Level.INFO, "Could not open database file " + mydb + " (" +
            e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Retrieve a list of CloudTrail APIs called (one table per API).
    try:
        stmt = dbConn.createStatement()
        tableSet = stmt.executeQuery(
            "SELECT name from sqlite_master WHERE type='table' ORDER by name; "
        )
        self.log(
            Level.INFO, "SQLite Query: SELECT name from sqlite_master WHERE type='table' ORDER by name;"
        )
    except SQLException as e:
        self.log(
            Level.INFO,
            "Error Running Query: SELECT name from sqlite_master WHERE type='table' ORDER by name;"
        )
        return IngestModule.ProcessResult.OK

    # NOTE(review): self.List_Of_tables is appended to, not reset, so a
    # second run of this module would accumulate duplicates — confirm.
    while tableSet.next():
        self.List_Of_tables.append(tableSet.getString("name"))

    # Retrieve a count of CloudTrail APIs called and use it for the Progress Bar
    try:
        stmt = dbConn.createStatement()
        resultSet = stmt.executeQuery(
            "SELECT COUNT(*) as count FROM Sqlite_Master WHERE type='table'; "
        )
        self.log(
            Level.INFO,
            "SELECT COUNT(*) as count FROM Sqlite_Master WHERE type='table';"
        )
    except SQLException as e:
        self.log(
            Level.INFO,
            "SELECT COUNT(*) as count FROM Sqlite_Master WHERE type='table';"
        )
        return IngestModule.ProcessResult.OK

    # NOTE(review): resultSet is read without an initial next(); standard
    # JDBC requires next() before reading a column — verify the SQLite JDBC
    # driver used here tolerates this.
    progressBar.switchToDeterminate(int(resultSet.getString("count")))
    stmt.close()
    dbConn.close()

    # Ingest the tables in mydb in Autopsy: for each table, mirror its
    # schema into custom attribute types and its rows into artifacts.
    count = 0
    for table_name in self.List_Of_tables:
        SQL_String_1 = "Select * from " + table_name + ";"
        SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
        artifact_name = "TSK_" + table_name.upper()
        artifact_desc = "CloudTrail: " + table_name.upper()
        # The addArtifactType return value is immediately overwritten; the
        # type id is re-fetched by name below — presumably so an already
        # existing type (re-run) is handled the same way. TODO confirm.
        artID_amc = skCase.addArtifactType(artifact_name, artifact_desc)
        artID_amc = skCase.getArtifactTypeID(artifact_name)
        artID_amc_evt = skCase.getArtifactType(artifact_name)

        # Re-open the database for each table.
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % mydb)
            stmt = dbConn.createStatement()
        except SQLException as e:
            return IngestModule.ProcessResult.OK

        # Discover the table's columns and register one custom attribute
        # type per column: STRING for TEXT/untyped, LONG for everything else.
        Column_Names = []
        Column_Types = []
        self.log(Level.INFO, "Running Query: " + SQL_String_2)
        resultSet2 = stmt.executeQuery(SQL_String_2)
        while resultSet2.next():
            Column_Names.append(resultSet2.getString("name").upper())
            Column_Types.append(resultSet2.getString("type").upper())
            if resultSet2.getString("type").upper() == "TEXT":
                try:
                    attID_ex1 = skCase.addArtifactAttributeType(
                        "TSK_" + resultSet2.getString("name").upper(),
                        BlackboardAttribute.
                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                        resultSet2.getString("name"))
                except:
                    # Attribute type probably exists already (re-run).
                    self.log(
                        Level.INFO, "Attributes Creation Error (string), " +
                        resultSet2.getString("name") + " ==> ")
            elif resultSet2.getString("type").upper() == "":
                try:
                    attID_ex1 = skCase.addArtifactAttributeType(
                        "TSK_" + resultSet2.getString("name").upper(),
                        BlackboardAttribute.
                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING,
                        resultSet2.getString("name"))
                except:
                    self.log(
                        Level.INFO, "Attributes Creation Error (string2), " +
                        resultSet2.getString("name") + " ==> ")
            else:
                try:
                    attID_ex1 = skCase.addArtifactAttributeType(
                        "TSK_" + resultSet2.getString("name").upper(),
                        BlackboardAttribute.
                        TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG,
                        resultSet2.getString("name"))
                except:
                    self.log(
                        Level.INFO, "Attributes Creation Error (long), " +
                        resultSet2.getString("name") + " ==> ")

        # Copy every row into one artifact, one attribute per column, all
        # anchored to the first file of the data source (files[0]).
        self.log(Level.INFO, "Running Query: " + SQL_String_1)
        resultSet3 = stmt.executeQuery(SQL_String_1)
        while resultSet3.next():
            art = files[0].newArtifact(artID_amc)
            Column_Number = 1
            for col_name in Column_Names:
                c_name = "TSK_" + col_name
                attID_ex1 = skCase.getAttributeType(c_name)
                if Column_Types[Column_Number - 1] == "TEXT":
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ex1,
                            CloudtopsyIngestModuleFactory.moduleName,
                            resultSet3.getString(Column_Number)))
                elif Column_Types[Column_Number - 1] == "":
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ex1,
                            CloudtopsyIngestModuleFactory.moduleName,
                            resultSet3.getString(Column_Number)))
                else:
                    # Non-text columns are coerced to a Java long.
                    art.addAttribute(
                        BlackboardAttribute(
                            attID_ex1,
                            CloudtopsyIngestModuleFactory.moduleName,
                            long(resultSet3.getInt(Column_Number))))
                Column_Number = Column_Number + 1

        # Tell the UI that new data of this artifact type arrived.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(CloudtopsyIngestModuleFactory.moduleName,
                            artID_amc_evt, None))
        stmt.close()
        dbConn.close()
        count += 1
        progressBar.progress(count)

    # Post a completion message to the ingest inbox.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Cloudtopsy",
        " CloudTrail Logs Successfully Ingested!")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, datasource, progressbar):
    """Data-source ingest: parse Windows Communication App "*.appcontent-ms"
    contact files into custom TSK_WINCOM_CONTACT blackboard artifacts.

    For each matching file, the UTF-16 XML is rewritten as UTF-8, parsed
    with ElementTree, and the contact fields (service, app id, first/last
    name, country, city, region, birthday) are posted as one artifact.
    Always returns ProcessResult.OK.

    Bug fix: the element counters used ``teller =+ 1`` (which *assigns*
    the value +1) instead of ``teller += 1``; they now increment properly.
    """
    PostBoard = IngestServices.getInstance()
    # Work size unknown until the file search completes.
    progressbar.switchToIndeterminate()
    ccase = Case.getCurrentCase().getSleuthkitCase()
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    msgcounter = 0

    # Register the custom artifact and attribute types. addArtifactType /
    # addArtifactAttributeType raise when the types already exist (e.g. a
    # module re-run), so the whole section is wrapped and only logged.
    artifact_name = "TSK_WINCOM_CONTACT"
    artifact_desc = "Windows Communication Contacts"
    try:
        artID_wincom_contact = ccase.addArtifactType(artifact_name, artifact_desc)
        attribute_name = "TSK_WINCOM_CONTACT_SERVICE"
        attribute_name1 = "TSK_WINCOM_CONTACT_APPID"
        attribute_name2 = "TSK_WINCOM_CONTACT_FIRSTNAME"
        attribute_name3 = "TSK_WINCOM_CONTACT_LASTNAME"
        attribute_name4 = "TSK_WINCOM_CONTACT_COUNTRY"
        attribute_name5 = "TSK_WINCOM_CONTACT_LOCALITY"
        attribute_name6 = "TSK_WINCOM_CONTACT_REGION"
        attribute_name7 = "TSK_WINCOM_CONTACT_BIRTHDAY"
        attID_ex = ccase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Service vs Person")
        attID_ex1 = ccase.addArtifactAttributeType(attribute_name1, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Application")
        attID_ex2 = ccase.addArtifactAttributeType(attribute_name2, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "First Name")
        attID_ex3 = ccase.addArtifactAttributeType(attribute_name3, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Last Name")
        attID_ex4 = ccase.addArtifactAttributeType(attribute_name4, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Country")
        attID_ex5 = ccase.addArtifactAttributeType(attribute_name5, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "City")
        attID_ex6 = ccase.addArtifactAttributeType(attribute_name6, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Region")
        attID_ex7 = ccase.addArtifactAttributeType(attribute_name7, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Birthday")
    except:
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA,
            WindowsCommunicationModuleFactory.moduleName + str(msgcounter),
            "Error creating artifacts" + str(msgcounter))
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

    # Look the type up by name — works whether it was just created or
    # already existed.
    artID_wincom_contact = ccase.getArtifactTypeID(artifact_name)
    artID_wincom_contact_evt = ccase.getArtifactType(artifact_name)

    # Find all appcontent-ms files in the data source.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(datasource, "%appcontent-ms")
    numFiles = len(files)
    progressbar.switchToDeterminate(numFiles)
    fileCount = 0

    for file in files:
        fileCount = fileCount + 1
        progressbar.progress(fileCount)
        progressbar.progress("Windows Communication Analyzer")
        msgcounter += 1
        ParentPath = file.getParentPath()
        # Only non-empty files that live under the Windows Communication
        # Apps package path are parsed.
        if file.getSize() > 0 and "microsoft.windowscommunicationsapps" in ParentPath:
            # Dump the file to the case temp dir (file id keeps names unique).
            lclXMLPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".appcontent-ms")
            ContentUtils.writeToFile(file, File(lclXMLPath))
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # The file is UTF-16 XML; rewrite it as UTF-8 (including the XML
            # declaration) so ElementTree can parse it.
            with open(lclXMLPath, "rb") as XMLFile:
                with open(lclXMLPath + ".rewrite", 'w+b') as NewXMLFile:
                    contents = XMLFile.read()
                    newContent = contents.decode('utf-16').encode('utf-8')
                    NewXMLFile.write(newContent.replace('<?xml version="1.0" encoding="utf-16"?>', '<?xml version="1.0" encoding="utf-8"?>'))

            f = open(lclXMLPath + ".rewrite", "rb")
            all = f.read()
            f.close()
            message = IngestMessage.createMessage(
                IngestMessage.MessageType.DATA,
                WindowsCommunicationModuleFactory.moduleName + str(msgcounter),
                all)

            # "**" marks a field that was absent from the XML.
            AppID = "**"
            FirstName = "**"
            LastName = "**"
            HomeAddress1Country = "**"
            HomeAddress1Locality = "**"
            HomeAddress1Region = "**"
            Birthday = "**"
            Service = "**"

            root = ET.fromstring(all)
            for elem in root.iter():
                teller = 0
                if "System.Contact.ConnectedServiceName" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1  # was "teller =+1" (assigned +1)
                            Service = child.text
                        # if teller == 1: break
                    else:
                        Service = elem.text
                elif "System.AppUserModel.PackageRelativeApplicationID" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1
                            AppID = child.text
                        # if teller == 1: break
                    else:
                        if len(elem.text) == 0:
                            AppID = elem.text
                        else:
                            AppID = "**"
                elif "System.Contact.FirstName" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1
                            FirstName = child.text
                            if teller == 1:
                                break
                    else:
                        FirstName = elem.text
                elif "System.Contact.LastName" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1
                            LastName = child.text
                            if teller == 1:
                                break
                    else:
                        LastName = elem.text
                elif "System.Contact.HomeAddress1Country" in str(elem.attrib):
                    if len(elem.text) == 2:
                        teller += 1
                        for child in elem:
                            HomeAddress1Country = child.text
                            if teller == 1:
                                break
                    else:
                        HomeAddress1Country = elem.text
                elif "System.Contact.HomeAddress1Locality" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1
                            HomeAddress1Locality = child.text
                            if teller == 1:
                                break
                    else:
                        HomeAddress1Locality = elem.text
                elif "System.Contact.HomeAddress1Region" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1
                            HomeAddress1Region = child.text
                            if teller == 1:
                                break
                    else:
                        HomeAddress1Region = elem.text
                elif "System.Contact.Birthday" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller += 1
                            Birthday = child.text
                            if teller == 1:
                                break
                    else:
                        Birthday = elem.text
                else:
                    # Another value — left for manual forensics.
                    pass

            # One artifact per appcontent-ms file with whatever was found.
            art = file.newArtifact(artID_wincom_contact)
            attID_ex = ccase.getAttributeType("TSK_WINCOM_CONTACT_SERVICE")
            art.addAttribute(BlackboardAttribute(attID_ex, WindowsCommunicationModuleFactory.moduleName, Service))
            attID_ex1 = ccase.getAttributeType("TSK_WINCOM_CONTACT_APPID")
            art.addAttribute(BlackboardAttribute(attID_ex1, WindowsCommunicationModuleFactory.moduleName, AppID))
            attID_ex2 = ccase.getAttributeType("TSK_WINCOM_CONTACT_FIRSTNAME")
            art.addAttribute(BlackboardAttribute(attID_ex2, WindowsCommunicationModuleFactory.moduleName, FirstName))
            attID_ex3 = ccase.getAttributeType("TSK_WINCOM_CONTACT_LASTNAME")
            art.addAttribute(BlackboardAttribute(attID_ex3, WindowsCommunicationModuleFactory.moduleName, LastName))
            attID_ex4 = ccase.getAttributeType("TSK_WINCOM_CONTACT_COUNTRY")
            art.addAttribute(BlackboardAttribute(attID_ex4, WindowsCommunicationModuleFactory.moduleName, HomeAddress1Country))
            attID_ex5 = ccase.getAttributeType("TSK_WINCOM_CONTACT_LOCALITY")
            art.addAttribute(BlackboardAttribute(attID_ex5, WindowsCommunicationModuleFactory.moduleName, HomeAddress1Locality))
            attID_ex6 = ccase.getAttributeType("TSK_WINCOM_CONTACT_REGION")
            art.addAttribute(BlackboardAttribute(attID_ex6, WindowsCommunicationModuleFactory.moduleName, HomeAddress1Region))
            attID_ex7 = ccase.getAttributeType("TSK_WINCOM_CONTACT_BIRTHDAY")
            art.addAttribute(BlackboardAttribute(attID_ex7, WindowsCommunicationModuleFactory.moduleName, Birthday))
            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(WindowsCommunicationModuleFactory.moduleName, artID_wincom_contact_evt, None))
        else:
            pass

        # Best-effort cleanup of the temp copies. lclXMLPath may be unset
        # when no matching file has been dumped yet; the bare except absorbs
        # that (and any in-use file) deliberately.
        try:
            os.remove(lclXMLPath)
            os.remove(lclXMLPath + ".rewrite")
        except:
            self.log(Level.INFO, "Cleanup of files did not work ")

    # Post a completion message to the ingest inbox.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        "Windows Communication App - Contacts",
        "Windows Communication App - Contacts Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest: parse every "contacts.db" SQLite database in the
    data source and post one TSK_CONTACT artifact per row of its
    ``contacts`` table (name, email, phone), indexing each artifact for
    keyword search.

    Always returns ProcessResult.OK (also on error/cancel paths).

    Fix: the early-return error paths now close the JDBC connection and
    delete the temp database copy instead of leaking them.
    """
    # Blackboard indexes posted artifacts for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find the "contacts.db" file(s) to parse.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "contacts.db")

    # Progress is one unit per database file.
    num_files = len(files)
    progressBar.switchToDeterminate(num_files)
    file_count = 0

    for f in files:
        # First check to see if the job was cancelled. If it was, return.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        # Begin processing the next file.
        self.log(Level.INFO, "Processing file: " + f.getName())
        file_count += 1

        # SQLite needs a real file on disk, so copy the db out of the image;
        # the file id keeps temp names collision-free.
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(f.getId()) + ".db")
        ContentUtils.writeToFile(f, File(lclDbPath))

        # Open the copy through the SQLite JDBC driver.
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            db_conn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                f.getName() + " (" + e.getMessage() + ")")
            os.remove(lclDbPath)  # fix: don't leave the temp copy behind
            return IngestModule.ProcessResult.OK

        # Query all rows from the contacts table.
        try:
            stmt = db_conn.createStatement()
            result_set = stmt.executeQuery("SELECT * FROM contacts")
        except SQLException as e:
            self.log(
                Level.INFO, "Error querying database for contacts table (" +
                e.getMessage() + ")")
            db_conn.close()       # fix: was leaked on this error path
            os.remove(lclDbPath)  # fix: was leaked on this error path
            return IngestModule.ProcessResult.OK

        # Process the DB: one TSK_CONTACT artifact per row.
        while result_set.next():
            art = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)

            # Name found in DB.
            name = result_set.getString("name")
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(),
                    HW11ContactsDbIngestModuleFactory.moduleName, name))

            # Email found.
            email = result_set.getString("email")
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(),
                    HW11ContactsDbIngestModuleFactory.moduleName, email))

            # Phone number found.
            phone = result_set.getString("phone")
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(),
                    HW11ContactsDbIngestModuleFactory.moduleName, phone))

            # Index the artifact for keyword searching.
            try:
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE,
                         "Error indexing artifact " + art.getDisplayName())

        # Update the UI about the newly created artifacts.
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(HW11ContactsDbIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, None))

        # Clean up tasks for the current file.
        stmt.close()
        db_conn.close()
        os.remove(lclDbPath)

    # After all db's are processed, post a message to the ingest inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "ContactsDb Analyzer",
                                          "Found %d files" % file_count)
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest: extract every Amcache.hve hive from the data
    source, run the bundled parser EXE to convert it into a SQLite database
    (Amcache.db3), then import each table listed in self.List_Of_tables
    into the case as a custom "TSK_<TABLE>" artifact type.

    Windows-only (the parser is a Windows EXE). Always returns
    ProcessResult.OK; failures are logged and abort further work.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the one created by the parser program.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "Amcache.hve")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create an Amcache work directory under the case temp directory; if it
    # already exists just continue.  (NB: "\A" is not an escape sequence in
    # Python 2, so "\Amcache" is a literal backslash path component.)
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir + "\Amcache")
    except:
        self.log(Level.INFO, "Amcache Directory already exists " + Temp_Dir)

    # Write each hive out of the image into the temp Amcache directory.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Save the hive locally under its original name.
        # NOTE(review): multiple hives with the same name would overwrite
        # each other here — confirm whether that can occur in practice.
        lclDbPath = os.path.join(Temp_Dir + "\Amcache", file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Example has only a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source. Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Run the EXE, saving output to a sqlite database.
    self.log(
        Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir +
        " Parm 2 ==> " + Temp_Dir + "\Amcache.db3")
    subprocess.Popen([
        self.path_to_exe, Temp_Dir + "\Amcache\Amcache.hve",
        Temp_Dir + "\Amcache.db3"
    ]).communicate()[0]

    # NOTE(review): this loop re-opens the same Amcache.db3 once per hive
    # found and attaches every artifact to the current `file`; with more
    # than one hive the same database would be ingested repeatedly.
    for file in files:
        # Open the DB using JDBC.
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "Amcache.db3")
        self.log(Level.INFO, "Path the Amcache database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # For each expected table, confirm it exists in the database, then
        # mirror its schema into attribute types and its rows into artifacts.
        for am_table_name in self.List_Of_tables:
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery(
                    "Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('"
                    + am_table_name + "'); ")
                self.log(Level.INFO, "query SQLite Master table for " + am_table_name)
            except SQLException as e:
                self.log(
                    Level.INFO, "Error querying database for Prefetch table ("
                    + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK

            # Cycle through each row and create artifacts.
            while resultSet.next():
                try:
                    self.log(
                        Level.INFO,
                        "Result (" + resultSet.getString("tbl_name") + ")")
                    table_name = resultSet.getString("tbl_name")
                    SQL_String_1 = "Select * from " + table_name + ";"
                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                    artifact_name = "TSK_" + table_name.upper()
                    artifact_desc = "Amcache " + table_name.upper()
                    try:
                        self.log(Level.INFO, "Begin Create New Artifacts")
                        artID_amc = skCase.addArtifactType(
                            artifact_name, artifact_desc)
                    except:
                        # Type probably exists already (e.g. module re-run).
                        self.log(
                            Level.INFO,
                            "Artifacts Creation Error, some artifacts may not exist now. ==> "
                        )
                    # Re-fetch by name regardless of whether creation worked.
                    artID_amc = skCase.getArtifactTypeID(artifact_name)
                    artID_amc_evt = skCase.getArtifactType(artifact_name)

                    # Discover the table's columns and register one custom
                    # attribute type per column: STRING for TEXT/untyped
                    # columns, LONG for everything else.
                    Column_Names = []
                    Column_Types = []
                    resultSet2 = stmt.executeQuery(SQL_String_2)
                    while resultSet2.next():
                        Column_Names.append(
                            resultSet2.getString("name").upper())
                        Column_Types.append(
                            resultSet2.getString("type").upper())
                        if resultSet2.getString("type").upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType(
                                    "TSK_" +
                                    resultSet2.getString("name").upper(),
                                    BlackboardAttribute.
                                    TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                    STRING, resultSet2.getString("name"))
                            except:
                                # Attribute type probably exists already.
                                self.log(
                                    Level.INFO, "Attributes Creation Error, "
                                    + resultSet2.getString("name") + " ==> ")
                        elif resultSet2.getString("type").upper() == "":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType(
                                    "TSK_" +
                                    resultSet2.getString("name").upper(),
                                    BlackboardAttribute.
                                    TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                    STRING, resultSet2.getString("name"))
                            except:
                                self.log(
                                    Level.INFO, "Attributes Creation Error, "
                                    + resultSet2.getString("name") + " ==> ")
                        else:
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType(
                                    "TSK_" +
                                    resultSet2.getString("name").upper(),
                                    BlackboardAttribute.
                                    TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.
                                    LONG, resultSet2.getString("name"))
                            except:
                                self.log(
                                    Level.INFO, "Attributes Creation Error, "
                                    + resultSet2.getString("name") + " ==> ")

                    # Copy every row into one artifact, one attribute per
                    # column.
                    resultSet3 = stmt.executeQuery(SQL_String_1)
                    while resultSet3.next():
                        art = file.newArtifact(artID_amc)
                        Column_Number = 1
                        for col_name in Column_Names:
                            c_name = "TSK_" + col_name
                            attID_ex1 = skCase.getAttributeType(c_name)
                            if Column_Types[Column_Number - 1] == "TEXT":
                                art.addAttribute(
                                    BlackboardAttribute(
                                        attID_ex1,
                                        ParseAmcacheIngestModuleFactory.
                                        moduleName,
                                        resultSet3.getString(
                                            Column_Number)))
                            elif Column_Types[Column_Number - 1] == "":
                                art.addAttribute(
                                    BlackboardAttribute(
                                        attID_ex1,
                                        ParseAmcacheIngestModuleFactory.
                                        moduleName,
                                        resultSet3.getString(
                                            Column_Number)))
                            # (BLOB/REAL support was considered and dropped;
                            # everything non-text is coerced to a long.)
                            else:
                                art.addAttribute(
                                    BlackboardAttribute(
                                        attID_ex1,
                                        ParseAmcacheIngestModuleFactory.
                                        moduleName,
                                        long(
                                            resultSet3.getInt(
                                                Column_Number))))
                            Column_Number = Column_Number + 1

                    # Tell the UI new data of this artifact type arrived.
                    IngestServices.getInstance().fireModuleDataEvent(
                        ModuleDataEvent(
                            ParseAmcacheIngestModuleFactory.moduleName,
                            artID_amc_evt, None))
                except SQLException as e:
                    self.log(
                        Level.INFO, "Error getting values from contacts table ("
                        + e.getMessage() + ")")

        # Clean up the JDBC resources for this pass.
        stmt.close()
        dbConn.close()

    # Clean up the temp directory and files.
    # NOTE(review): the hives were written to Temp_Dir + "\Amcache", but
    # this removes Temp_Dir + "\\" + name, and rmdir targets Temp_Dir itself
    # (the shared case temp directory) — both look like the wrong paths;
    # the bare excepts mean the failures are only logged.  Confirm the
    # intended cleanup paths.
    for file in files:
        try:
            os.remove(Temp_Dir + "\\" + file.getName())
        except:
            self.log(
                Level.INFO, "removal of Amcache file failed " + Temp_Dir +
                "\\" + file.getName())
    try:
        os.rmdir(Temp_Dir)
    except:
        self.log(Level.INFO, "removal of Amcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Amcache Parser",
                                          " Amcache Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract .fseventsd files, run the external FSEvents parser over them,
    and post the parsed records as blackboard artifacts.

    The artifact names/titles and the SQL used to read the parsed records
    come from the bundled fsevents_sql.db3 settings database that lives next
    to this module.  All errors are logged and the method always returns
    IngestModule.ProcessResult.OK so the rest of the ingest keeps running.

    :param dataSource: the Content object for the data source being ingested
    :param progressBar: DataSourceIngestModuleProgress for UI feedback
    :return: IngestModule.ProcessResult.OK in all cases
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Scratch sub-directory for the extracted .fseventsd files.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    fsevents_dir = os.path.join(Temp_Dir, "MacFSEvents")
    try:
        os.mkdir(fsevents_dir)
    except OSError:
        # Left over from a previous run; reuse it.
        self.log(Level.INFO, "FSEvents Directory already exists " + Temp_Dir)

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%", ".fseventsd")
    numFiles = len(files)

    # Copy every FSEvents file out of the image into the temp directory.
    for file in files:
        if file.getName() in ("..", ".", "fseventsd-uuid"):
            continue
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        filePath = os.path.join(fsevents_dir, file.getName())
        ContentUtils.writeToFile(file, File(filePath))

    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))
    self.log(Level.INFO, "Running program ==> " + self.MacFSEvents_Executable +
             " -c Autopsy " + "-o " + Temp_Dir + " -s " + fsevents_dir)

    # Run the external parser; it writes its results into a SQLite database
    # (autopsy_FSEvents-Parsed_Records_DB.sqlite) inside Temp_Dir.
    pipe = Popen([self.MacFSEvents_Executable, "-c", "Autopsy", "-o", Temp_Dir,
                  "-s", fsevents_dir], stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    database_file = os.path.join(Temp_Dir, "autopsy_FSEvents-Parsed_Records_DB.sqlite")

    # Open the settings database (ships next to this module) that maps
    # artifact names/titles to the SQL used to pull the parsed records.
    try:
        head, tail = os.path.split(os.path.abspath(__file__))
        settings_db = os.path.join(head, "fsevents_sql.db3")
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn1 = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +
                 database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Create the custom artifact types.  addArtifactType raises when the
    # type already exists, which is expected on re-runs, so just log it.
    try:
        stmt1 = dbConn1.createStatement()
        sql_statement1 = "select distinct artifact_name, artifact_title from extracted_content_sql;"
        resultSet1 = stmt1.executeQuery(sql_statement1)
        while resultSet1.next():
            try:
                self.log(Level.INFO, "Begin Create New Artifacts")
                artID_fse = skCase.addArtifactType(
                    resultSet1.getString("artifact_name"),
                    resultSet1.getString("artifact_title"))
            except:
                self.log(Level.INFO, "Artifacts Creation Error, " +
                         resultSet1.getString("artifact_name") +
                         " some artifacts may not exist now. ==> ")
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +
                 database_file + " (" + e.getMessage() + ")")

    # Create the attribute types; an exception means the type already exists.
    try:
        attID_fse_fn = skCase.addArtifactAttributeType(
            "TSK_FSEVENTS_FILE_NAME",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fse_msk = skCase.addArtifactAttributeType(
            "TSK_FSEVENTS_FILE_MASK",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Mask")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_src = skCase.addArtifactAttributeType(
            "TSK_FSEVENTS_SOURCE",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source File")
    except:
        # Bug fix: this failure message previously said "Mask".
        self.log(Level.INFO, "Attributes Creation Error, Source File. ==> ")
    try:
        attID_fse_dte = skCase.addArtifactAttributeType(
            "TSK_FSEVENTS_DATES",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Date(s)")
    except:
        # Bug fix: this failure message previously said "Mask".
        self.log(Level.INFO, "Attributes Creation Error, Date(s). ==> ")

    # Open the database the external parser just produced.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % database_file)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +
                 database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Default type/attribute handles (overwritten per configured query below).
    artID_fse = skCase.getArtifactTypeID("TSK_MACOS_ALL_FSEVENTS")
    artID_fse_evt = skCase.getArtifactType("TSK_MACOS_ALL_FSEVENTS")
    attID_fse_fn = skCase.getAttributeType("TSK_FSEVENTS_FILE_NAME")
    attID_fse_msk = skCase.getAttributeType("TSK_FSEVENTS_FILE_MASK")
    attID_fse_src = skCase.getAttributeType("TSK_FSEVENTS_SOURCE")
    attID_fse_dte = skCase.getAttributeType("TSK_FSEVENTS_DATES")

    # For every extracted FSEvents file, run each configured query and post
    # one artifact per returned row.
    for file in files:
        if 'slack' in file.getName():
            continue
        if file.getName() in ("..", "."):
            continue
        stmt1 = dbConn1.createStatement()
        sql_statement1 = "select sql_statement, artifact_name, artifact_title from extracted_content_sql;"
        resultSet1 = stmt1.executeQuery(sql_statement1)
        while resultSet1.next():
            try:
                artID_fse = skCase.getArtifactTypeID(resultSet1.getString("artifact_name"))
                artID_fse_evt = skCase.getArtifactType(resultSet1.getString("artifact_name"))
                try:
                    stmt = dbConn.createStatement()
                    # Restrict the configured query to records that came
                    # from this specific source file.
                    sql_statement = (resultSet1.getString("sql_statement") +
                                     " and source like '%" + file.getName() + "';")
                    resultSet = stmt.executeQuery(sql_statement)
                    # Cycle through each row and create an artifact.
                    while resultSet.next():
                        art = file.newArtifact(artID_fse)
                        art.addAttributes((
                            BlackboardAttribute(attID_fse_fn, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("filename")),
                            BlackboardAttribute(attID_fse_msk, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("mask")),
                            BlackboardAttribute(attID_fse_src, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("source")),
                            BlackboardAttribute(attID_fse_dte, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("OTHER_DATES"))))
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " +
                             database_file + " (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK
            except SQLException as e:
                self.log(Level.INFO, "Could not open database file (not SQLite) " +
                         database_file + " (" + e.getMessage() + ")")
        try:
            stmt.close()
        except:
            self.log(Level.INFO, "Error closing statement for " + file.getName())

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(MacFSEventsIngestModuleFactory.moduleName, artID_fse_evt, None))

    # Clean up: close connections and remove everything we wrote to Temp_Dir.
    try:
        stmt.close()
        dbConn.close()
        stmt1.close()
        dbConn1.close()
        os.remove(os.path.join(Temp_Dir, "Autopsy_FSEvents-EXCEPTIONS_LOG.txt"))
        os.remove(os.path.join(Temp_Dir, "Autopsy_FSEvents-Parsed_Records.tsv"))
        os.remove(os.path.join(Temp_Dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite"))
        shutil.rmtree(fsevents_dir)
    except:
        self.log(Level.INFO, "removal of MacFSEvents imageinfo database failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "MacFSEventsSettings",
        " MacFSEventsSettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract *.automaticDestinations-ms Jump List files, parse them with
    the bundled Windows executable into a SQLite database, and post one
    TSK_JL_AD artifact per parsed Jump List entry.

    Windows only; on other platforms the data source is skipped.  Errors
    are logged and the method returns IngestModule.ProcessResult.OK.

    :param dataSource: the Content object for the data source being ingested
    :param progressBar: DataSourceIngestModuleProgress for UI feedback
    :return: IngestModule.ProcessResult.OK in all cases
    """
    # Check to see if the artifact exists and if not then create it; also
    # check to see if the attributes exist and if not then create them.
    # addArtifactType/addArtifactAttributeType raise when the type already
    # exists (normal on re-runs), so each call is wrapped and just logged.
    skCase = Case.getCurrentCase().getSleuthkitCase()
    skCase_Tran = skCase.beginTransaction()
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_jl_ad = skCase.addArtifactType("TSK_JL_AD", "Jump List Auto Dest")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD")

    try:
        attID_jl_fn = skCase.addArtifactAttributeType("TSK_JLAD_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "JumpList File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, JL AD File Name. ==> ")
    try:
        attID_jl_fg = skCase.addArtifactAttributeType("TSK_JLAD_FILE_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Description")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Description. ==> ")
    try:
        attID_jl_in = skCase.addArtifactAttributeType("TSK_JLAD_ITEM_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Item Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Item Name. ==> ")
    try:
        attID_jl_cl = skCase.addArtifactAttributeType("TSK_JLAD_COMMAND_LINE_ARGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Command Line Args")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Command Line Arguments. ==> ")
    try:
        # NOTE(review): the type name "TSK_JLAD_Drive Type" contains a space;
        # kept as-is because existing case databases already use this name.
        attID_jl_dt = skCase.addArtifactAttributeType("TSK_JLAD_Drive Type", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Type")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Drive Type. ==> ")
    try:
        attID_jl_dsn = skCase.addArtifactAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Serial Number")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Drive Serial Number. ==> ")
    try:
        attID_jl_des = skCase.addArtifactAttributeType("TSK_JLAD_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Description")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Description. ==> ")
    try:
        attID_jl_evl = skCase.addArtifactAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Env Var Location")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Env Var Location. ==> ")
    try:
        attID_jl_fat = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ACCESS_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Access Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Access Time. ==> ")
    try:
        attID_jl_faf = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Attribute Flags")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Attribute Flags. ==> ")
    try:
        attID_jl_fct = skCase.addArtifactAttributeType("TSK_JLAD_FILE_CREATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Creation Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Creation Time. ==> ")
    try:
        attID_jl_fmt = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Modification Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ")
    try:
        attID_jl_fs = skCase.addArtifactAttributeType("TSK_JLAD_FILE_SIZE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")
    try:
        attID_jl_ic = skCase.addArtifactAttributeType("TSK_JLAD_ICON_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Icon Location")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Icon Location. ==> ")
    try:
        attID_jl_ltid = skCase.addArtifactAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Link Target Identifier Data")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Link Target Identifier Data. ==> ")
    try:
        attID_jl_lp = skCase.addArtifactAttributeType("TSK_JLAD_LOCAL_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Local Path")
    except:
        # Bug fix: this failure message previously said "File Modification Time".
        self.log(Level.INFO, "Attributes Creation Error, Local Path. ==> ")
    try:
        attID_jl_mi = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Machine Identifier")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Machine Identifier. ==> ")
    try:
        attID_jl_np = skCase.addArtifactAttributeType("TSK_JLAD_NETWORK_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Network Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Network Path. ==> ")
    try:
        attID_jl_rp = skCase.addArtifactAttributeType("TSK_JLAD_RELATIVE_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Relative Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Relative Path. ==> ")
    try:
        attID_jl_vl = skCase.addArtifactAttributeType("TSK_JLAD_VOLUME_LABEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Volume Label")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Volume Label. ==> ")
    try:
        attID_jl_wc = skCase.addArtifactAttributeType("TSK_JLAD_WORKING_DIRECTORY", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Working Directory")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Working Directory. ==> ")

    # Get the artifact and attribute type handles that were just created
    # (or that already existed from a previous run).
    artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD")
    artID_jl_ad_evt = skCase.getArtifactType("TSK_JL_AD")
    attID_jl_fn = skCase.getAttributeType("TSK_JLAD_FILE_NAME")
    attID_jl_fg = skCase.getAttributeType("TSK_JLAD_FILE_DESCRIPTION")
    attID_jl_in = skCase.getAttributeType("TSK_JLAD_ITEM_NAME")
    attID_jl_cl = skCase.getAttributeType("TSK_JLAD_COMMAND_LINE_ARGS")
    attID_jl_dt = skCase.getAttributeType("TSK_JLAD_Drive Type")
    attID_jl_dsn = skCase.getAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER")
    attID_jl_des = skCase.getAttributeType("TSK_JLAD_DESCRIPTION")
    attID_jl_evl = skCase.getAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION")
    attID_jl_fat = skCase.getAttributeType("TSK_JLAD_FILE_ACCESS_TIME")
    attID_jl_faf = skCase.getAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS")
    attID_jl_fct = skCase.getAttributeType("TSK_JLAD_FILE_CREATION_TIME")
    attID_jl_fmt = skCase.getAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME")
    attID_jl_fs = skCase.getAttributeType("TSK_JLAD_FILE_SIZE")
    attID_jl_ic = skCase.getAttributeType("TSK_JLAD_ICON_LOCATION")
    attID_jl_ltid = skCase.getAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA")
    attID_jl_lp = skCase.getAttributeType("TSK_JLAD_LOCAL_PATH")
    attID_jl_mi = skCase.getAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER")
    attID_jl_np = skCase.getAttributeType("TSK_JLAD_NETWORK_PATH")
    attID_jl_rp = skCase.getAttributeType("TSK_JLAD_RELATIVE_PATH")
    attID_jl_vl = skCase.getAttributeType("TSK_JLAD_VOLUME_LABEL")
    attID_jl_wd = skCase.getAttributeType("TSK_JLAD_WORKING_DIRECTORY")

    # We don't know how much work there is yet.
    progressBar.switchToIndeterminate()

    # Find the Jump List automatic-destinations files.
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create JL_AD scratch directory in the case temp directory; if it
    # already exists (previous run) just continue.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    jl_dir = os.path.join(Temp_Dir, "JL_AD")
    self.log(Level.INFO, "create Directory " + jl_dir)
    try:
        os.mkdir(jl_dir)
    except OSError:
        self.log(Level.INFO, "JL_AD Directory already exists " + Temp_Dir)

    # Write out each Jump List file to the scratch directory.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        lclDbPath = os.path.join(jl_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # The parser is a Windows EXE, so bail if we aren't on Windows.
    if not PlatformUtil.isWindowsOS():
        self.log(Level.INFO, "Ignoring data source.  Not running on Windows")
        return IngestModule.ProcessResult.OK

    # Run the EXE, saving output to a sqlite database.
    jl_db = os.path.join(Temp_Dir, "JL_AD.db3")
    self.log(Level.INFO, "Running program on data source parm 1 ==> " +
             jl_dir + " Parm 2 ==> " + jl_db)
    output = subprocess.Popen([self.path_to_exe, jl_dir, jl_db, self.path_to_app_id_db],
                              stdout=subprocess.PIPE).communicate()[0]
    # NOTE(review): "output" is the captured stdout, not the return code.
    self.log(Level.INFO, " Return code is ==> " + output)

    # Open the parser's output database using JDBC.
    lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "JL_AD.db3")
    self.log(Level.INFO, "Path to the JL_AD database file created ==> " + lclDbPath)
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +
                 file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms")

    for file in files:
        file_name = os.path.splitext(file.getName())[0]
        self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====")
        # Query the parsed Jump List table for this file's rows.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = "select File_Name, File_Description, Item_Name, command_line_arguments, drive_type, drive_serial_number, " + \
                            " description, environment_variables_location, file_access_time, file_attribute_flags, file_creation_time, " + \
                            " file_modification_time, file_size, icon_location, link_target_identifier_data, local_path, " + \
                            " machine_identifier, network_path, relative_path, volume_label, working_directory " + \
                            " from Automatic_destinations_JL where upper(File_Name) = upper('" + file_name + "');"
            resultSet = stmt.executeQuery(SQL_Statement)
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create one artifact per entry.
        while resultSet.next():
            try:
                File_Name = resultSet.getString("File_Name")
                File_Description = resultSet.getString("File_Description")
                Item_Name = resultSet.getString("Item_Name")
                Command_Line_Arguments = resultSet.getString("command_line_arguments")
                Drive_Type = resultSet.getInt("drive_type")
                Drive_Serial_Number = resultSet.getInt("drive_serial_number")
                Description = resultSet.getString("description")
                Environment_Variables_Location = resultSet.getString("environment_variables_location")
                File_Access_Time = resultSet.getString("file_access_time")
                File_Attribute_Flags = resultSet.getInt("file_attribute_flags")
                File_Creation_Time = resultSet.getString("file_creation_time")
                File_Modification_Time = resultSet.getString("file_modification_time")
                File_Size = resultSet.getInt("file_size")
                Icon_Location = resultSet.getString("icon_location")
                Link_Target_Identifier_Data = resultSet.getString("link_target_identifier_data")
                Local_Path = resultSet.getString("local_path")
                Machine_Identifier = resultSet.getString("machine_identifier")
                Network_Path = resultSet.getString("network_path")
                Relative_Path = resultSet.getString("relative_path")
                Volume_Label = resultSet.getString("volume_label")
                Working_Directory = resultSet.getString("working_directory")
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

            # Make an artifact on the blackboard and attach all the fields.
            art = file.newArtifact(artID_jl_ad)
            art.addAttributes((
                BlackboardAttribute(attID_jl_fn, JumpListADDbIngestModuleFactory.moduleName, File_Name),
                BlackboardAttribute(attID_jl_fg, JumpListADDbIngestModuleFactory.moduleName, File_Description),
                BlackboardAttribute(attID_jl_in, JumpListADDbIngestModuleFactory.moduleName, Item_Name),
                BlackboardAttribute(attID_jl_cl, JumpListADDbIngestModuleFactory.moduleName, Command_Line_Arguments),
                BlackboardAttribute(attID_jl_dt, JumpListADDbIngestModuleFactory.moduleName, Drive_Type),
                BlackboardAttribute(attID_jl_dsn, JumpListADDbIngestModuleFactory.moduleName, Drive_Serial_Number),
                BlackboardAttribute(attID_jl_des, JumpListADDbIngestModuleFactory.moduleName, Description),
                BlackboardAttribute(attID_jl_evl, JumpListADDbIngestModuleFactory.moduleName, Environment_Variables_Location),
                BlackboardAttribute(attID_jl_fat, JumpListADDbIngestModuleFactory.moduleName, File_Access_Time),
                BlackboardAttribute(attID_jl_faf, JumpListADDbIngestModuleFactory.moduleName, File_Attribute_Flags),
                BlackboardAttribute(attID_jl_fct, JumpListADDbIngestModuleFactory.moduleName, File_Creation_Time),
                BlackboardAttribute(attID_jl_fmt, JumpListADDbIngestModuleFactory.moduleName, File_Modification_Time),
                BlackboardAttribute(attID_jl_fs, JumpListADDbIngestModuleFactory.moduleName, File_Size),
                BlackboardAttribute(attID_jl_ic, JumpListADDbIngestModuleFactory.moduleName, Icon_Location),
                BlackboardAttribute(attID_jl_ltid, JumpListADDbIngestModuleFactory.moduleName, Link_Target_Identifier_Data),
                BlackboardAttribute(attID_jl_lp, JumpListADDbIngestModuleFactory.moduleName, Local_Path),
                BlackboardAttribute(attID_jl_mi, JumpListADDbIngestModuleFactory.moduleName, Machine_Identifier),
                BlackboardAttribute(attID_jl_np, JumpListADDbIngestModuleFactory.moduleName, Network_Path),
                BlackboardAttribute(attID_jl_rp, JumpListADDbIngestModuleFactory.moduleName, Relative_Path),
                BlackboardAttribute(attID_jl_vl, JumpListADDbIngestModuleFactory.moduleName, Volume_Label),
                BlackboardAttribute(attID_jl_wd, JumpListADDbIngestModuleFactory.moduleName, Working_Directory)))

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None))

    # Clean up.
    skCase_Tran.commit()
    stmt.close()
    dbConn.close()
    os.remove(lclDbPath)

    # Clean up the extracted Jump List copies and the scratch directory.
    for file in files:
        try:
            # Bug fix: the extracted copies live under the JL_AD
            # sub-directory, not directly in Temp_Dir, so the old
            # Temp_Dir-based path never matched and nothing was deleted.
            os.remove(os.path.join(jl_dir, file.getName()))
        except OSError:
            self.log(Level.INFO, "removal of JL_AD file failed " + os.path.join(jl_dir, file.getName()))
    try:
        # Bug fix: remove the JL_AD scratch directory, not the whole case
        # temp directory (os.rmdir(Temp_Dir) could never succeed and, if it
        # had, would have deleted other modules' scratch data).
        os.rmdir(jl_dir)
    except OSError:
        self.log(Level.INFO, "removal of JL_AD directory failed " + jl_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "JumpList AD", " JumpList AD Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None))

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract every WebcacheV01.dat from the data source, convert each to a
    SQLite DB with an external parser exe, then turn every row of every
    container table into a dynamically-typed blackboard artifact.

    Returns IngestModule.ProcessResult.OK in all paths (errors are logged,
    not raised).
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "WebcacheV01.dat")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Webcache working directory in the case temp directory; if it
    # already exists (prior run) just continue processing.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "Webcache")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Webcache Directory already exists " + temp_dir)

    # Pass 1: write each WebcacheV01.dat out of the image and run the
    # external parser, which emits a .db3 SQLite database next to it.
    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        fileCount += 1

        # Save the DB locally in the temp folder; use file id in the name to
        # reduce collisions between same-named files from different users.
        lclDbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()))
        DbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, file.getName() + ' ==> ' + str(file.getId()) + ' ==> ' + file.getUniquePath())
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE, saving output to a sqlite database
        self.log(Level.INFO, "Running program on data source parm 1 ==> " + temp_dir + " Parm 2 ==> " + DbPath)
        pipe = Popen([self.path_to_exe, lclDbPath, DbPath], stdout=PIPE, stderr=PIPE, cwd=os.path.dirname(os.path.abspath(__file__)))
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

    # Pass 2: open each generated .db3 over JDBC and build artifacts from it.
    for file in files:
        lclDbPath = os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3")
        self.log(Level.INFO, "Path the Webcache database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Find the distinct container names; each becomes its own artifact type.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select distinct container_name from all_containers;")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        Container_List = []
        while resultSet.next():
            Container_List.append(resultSet.getString("container_name"))

        # Cycle through each container and create one artifact per row.
        for c_name in Container_List:
            try:
                container_name = c_name
                SQL_String_1 = "Select * from all_containers where container_name = '" + container_name + "';"
                SQL_String_2 = "PRAGMA table_info('All_Containers')"

                artifact_name = "TSK_WC_" + container_name.upper()
                artifact_desc = "WebcacheV01 " + container_name.upper()

                # addArtifactType raises if the type already exists (e.g. on a
                # re-run), so the failure is expected and only logged.
                try:
                    self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_web = skCase.addArtifactType(artifact_name, artifact_desc)
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

                artID_web = skCase.getArtifactTypeID(artifact_name)
                artID_web_evt = skCase.getArtifactType(artifact_name)

                # Discover column names/types so attributes can be created with
                # matching value types (TEXT / untyped -> STRING, other -> LONG).
                Column_Names = []
                Column_Types = []
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type").upper())
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    elif resultSet2.getString("type").upper() == "":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One artifact per result row; one attribute per column.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_web)
                    Column_Number = 1
                    for col_name in Column_Names:
                        c_name = "TSK_" + col_name
                        # NOTE(review): the first assignment is immediately
                        # overwritten by the second; getAttrTypeID looks dead.
                        attID_ex1 = skCase.getAttrTypeID(c_name)
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        elif Column_Types[Column_Number - 1] == "":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        else:
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseWebcacheIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                        Column_Number = Column_Number + 1

                # Fire an event to notify the UI and others that there are new artifacts
                IngestServices.getInstance().fireModuleDataEvent(
                    ModuleDataEvent(ParseWebcacheIngestModuleFactory.moduleName, artID_web_evt, None))
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

    # Clean up extracted files and the working directory (best effort).
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName() + "-" + str(file.getId())))
            os.remove(os.path.join(temp_dir, file.getName() + "-" + str(file.getId()) + ".db3"))
        except:
            self.log(Level.INFO, "removal of Webcache file failed " + temp_dir + "\\" + file.getName() + "-" + str(file.getId()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Webcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Webcache Parser", " Webcache Has Been Parsed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Analyze Arlo (Netgear) app data: flag cached thumbnails as interesting
    files and pull login credentials out of the Phoenix.xml settings file.

    Returns IngestModule.ProcessResult.OK in all paths.
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get case
    case = Case.getCurrentCase().getSleuthkitCase()

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    setting_file = fileManager.findFiles(
        dataSource, "Phoenix.xml") if self.local_settings.get_parse_settings() else []
    # BUG FIX: cache_files was gated on get_parse_settings(); with only the
    # cache option enabled the thumbnail list was always empty. Gate on
    # get_parse_cache(), which is the setting that controls the loop below.
    cache_files = fileManager.findFiles(
        dataSource, "%.0%", "com.netgear.android"
    ) if self.local_settings.get_parse_cache() else []
    num_files = len(setting_file) + len(cache_files)
    self.log(Level.INFO, "found " + str(num_files) + " files")
    progressBar.switchToDeterminate(num_files)
    file_count = 0

    # Cache files (thumbnails in cache/http & cache/cams under com.netgear.android)
    if self.local_settings.get_parse_cache():
        for file in cache_files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            self.log(Level.INFO, "Processing file: " + file.getName())
            file_count += 1

            # Flag the thumbnail as an "interesting file".
            art = file.newArtifact(
                BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(
                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                ArloIngestModuleFactory.moduleName, "Arlo Thumbnails")
            art.addAttribute(att)
            progressBar.progress(file_count)

    # Settings file (Phoenix.xml): flag it and extract login artifacts.
    if self.local_settings.get_parse_settings():
        for file in setting_file:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            self.log(Level.INFO, "Processing file: " + file.getName())
            file_count += 1

            art = file.newArtifact(
                BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
            att = BlackboardAttribute(
                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                ArloIngestModuleFactory.moduleName, "Arlo")
            art.addAttribute(att)

            # Extract the XML to the case temp directory so minidom can parse it.
            lcl_setting_path = os.path.join(
                Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".xml")
            ContentUtils.writeToFile(file, File(lcl_setting_path))

            # Pull email / token / userId out of the <string name="..."> elements.
            arlo_settings = minidom.parse(lcl_setting_path)
            tags = arlo_settings.getElementsByTagName("string")
            arlo_logins = {}
            for tag in tags:
                if tag.getAttribute('name') == "email":
                    arlo_logins['username'] = str(tag.firstChild.data)
                elif tag.getAttribute('name') == "token":
                    arlo_logins['token'] = str(
                        tag.firstChild.data).replace("\n", "")
                elif tag.getAttribute('name') == "userId":
                    arlo_logins['user_id'] = str(tag.firstChild.data)

            # Custom artifact/attribute types are assumed to have been created
            # elsewhere (startUp) -- TODO confirm; lookups fail otherwise.
            art_type_id = case.getArtifactTypeID("ESC_GENERIC_LOGIN")
            art_type = case.getArtifactType("ESC_GENERIC_LOGIN")

            # Artifact
            art = file.newArtifact(art_type_id)

            # Attributes
            att_login_username_id = case.getAttributeType(
                "ESC_GENERIC_LOGIN_USERNAME")
            att_login_secret_id = case.getAttributeType(
                "ESC_GENERIC_LOGIN_SECRET")
            att_login_secret_type_id = case.getAttributeType(
                "ESC_GENERIC_LOGIN_SECRET_TYPE")
            att_login_service_id = case.getAttributeType(
                "ESC_GENERIC_LOGIN_SERVICE")
            att_login_remarks_id = case.getAttributeType(
                "ESC_GENERIC_LOGIN_REMARKS")

            att_login_username = BlackboardAttribute(
                att_login_username_id, ArloIngestModuleFactory.moduleName,
                arlo_logins['username'])
            att_login_secret = BlackboardAttribute(
                att_login_secret_id, ArloIngestModuleFactory.moduleName,
                arlo_logins['token'])
            att_login_secret_type = BlackboardAttribute(
                att_login_secret_type_id, ArloIngestModuleFactory.moduleName,
                "Oauth2 Token")
            att_login_service = BlackboardAttribute(
                att_login_service_id, ArloIngestModuleFactory.moduleName,
                "Arlo")
            att_login_remarks = BlackboardAttribute(
                att_login_remarks_id, ArloIngestModuleFactory.moduleName,
                "User ID: %s" % arlo_logins['user_id'])

            art.addAttribute(att_login_username)
            art.addAttribute(att_login_secret)
            art.addAttribute(att_login_secret_type)
            art.addAttribute(att_login_service)
            art.addAttribute(att_login_remarks)

            # Fire an event to notify the UI that there are new artifacts.
            IngestServices.getInstance().fireModuleDataEvent(
                ModuleDataEvent(ArloIngestModuleFactory.moduleName, art_type, None))
            progressBar.progress(file_count)

    # FINISHED! Post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Arlo Analysis",
                                          "Found %d files" % file_count)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract hiberfil.sys, convert it to a raw memory image with Volatility's
    imagecopy plugin, and add the result as a new local-files data source.

    Only runs the extraction when self.hiber_flag is set. Returns
    IngestModule.ProcessResult.OK in all paths.
    """

    self.log(Level.INFO, "Starting to process Hiberfil.sys and Crash Dumps")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    if self.hiber_flag:
        Mod_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        ModOut_Dir = os.path.join(Mod_Dir, "Volatility", "Memory-Image-hiberfil")
        self.log(Level.INFO, "Module Output Directory ===> " + ModOut_Dir)
        # BUG FIX: both mkdir calls were in one try block, so when the
        # "Volatility" parent already existed the child directory was never
        # created. Create each level independently (best effort).
        try:
            os.mkdir(os.path.join(Mod_Dir, "Volatility"))
        except:
            self.log(Level.INFO, "***** Error Module Output Directory already exists " + ModOut_Dir)
        try:
            os.mkdir(ModOut_Dir)
        except:
            self.log(Level.INFO, "***** Error Module Output Directory already exists " + ModOut_Dir)

        skCase = Case.getCurrentCase().getSleuthkitCase();
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "hiberfil.sys", "/")
        numFiles = len(files)
        self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

        for file in files:
            self.log(Level.INFO, "File to process is ==> " + str(file))
            self.log(Level.INFO, "File name to process is ==> " + file.getName())
            tmp_Dir = Case.getCurrentCase().getTempDirectory()
            Hiber_File = os.path.join(tmp_Dir, file.getName())
            ContentUtils.writeToFile(file, File(Hiber_File))
            self.log(Level.INFO, "File name to process is ==> " + Hiber_File)

            # Convert the hibernation file to a raw image with Volatility.
            dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
            if self.Python_Program:
                # Volatility is a .py script: invoke it through the interpreter.
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " + Hiber_File + " " + \
                         " -O " + dump_file)
                if PlatformUtil.isWindowsOS():
                    pipe = Popen(["Python.exe", self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
                else:
                    pipe = Popen(["python", self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            else:
                # Standalone Volatility executable.
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " + Hiber_File + " " + \
                         " -O " + dump_file)
                pipe = Popen([self.Volatility_Executable, "imagecopy", "-f", Hiber_File, "-O" + dump_file], stdout=PIPE, stderr=PIPE)

            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Add the converted memory image to the case as a local data source.
        services = IngestServices.getInstance()
        progress_updater = ProgressUpdater()
        newDataSources = []

        dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
        dir_list = []
        dir_list.append(dump_file)

        fileManager_2 = Case.getCurrentCase().getServices().getFileManager()
        skcase_data = Case.getCurrentCase()

        # Get a unique device id using uuid
        device_id = UUID.randomUUID()
        self.log(Level.INFO, "device id: ==> " + str(device_id))
        skcase_data.notifyAddingDataSource(device_id)

        # Add data source with files
        newDataSource = fileManager_2.addLocalFilesDataSource(str(device_id), "Hiberfile Memory Image", "", dir_list, progress_updater)
        newDataSources.append(newDataSource.getRootDirectory())

        # Notify the case about each file that was added.
        files_added = progress_updater.getFiles()
        for file_added in files_added:
            skcase_data.notifyDataSourceAdded(file_added, device_id)
            self.log(Level.INFO, "Fire Module1: ==> " + str(file_added))

    # Post a completion message to the ingest inbox.
    # FIX: message previously read "extracted fro Image".
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "HiberFil_Crash", " Hiberfil/Crash Dumps have been extracted from Image. " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Download the ClamAV main/daily signature databases, unpack them with
    sigtool (self.pathToExe), and convert the .hdb files into date-stamped
    Autopsy hashset text files under the case module directory.

    Note: this method never reads the data source; it only builds hashsets.
    Returns IngestModule.ProcessResult.OK.
    """

    # Get current date and time to append to final hashset file names
    now = datetime.now()

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Create ClamAv directory in temp directory, if it exists then continue on processing
    temporaryDir = Case.getCurrentCase().getTempDirectory()
    tempDir = os.path.join(temporaryDir, "ClamAv")
    self.log(Level.INFO, "create Directory " + tempDir)
    try:
        os.mkdir(tempDir)
    except:
        self.log(Level.INFO, "ClamAv Directory already exists " + tempDir)

    # Same for the persistent module output directory (hashsets land here).
    moduleDir = Case.getCurrentCase().getModuleDirectory()
    modDir = os.path.join(moduleDir, "ClamAv")
    self.log(Level.INFO, "create Directory " + modDir)
    try:
        os.mkdir(modDir)
    except:
        self.log(Level.INFO, "ClamAv Directory already exists " + modDir)

    # NOTE(review): downloads use plain HTTP with no integrity check --
    # consider https and verifying the CVD signature.
    progressBar.progress("Downloading Main.cvd")
    filedata = urllib2.urlopen('http://database.clamav.net/main.cvd')
    datatowrite = filedata.read()
    with open(os.path.join(tempDir, "main.cvd"), 'wb') as f:
        f.write(datatowrite)

    progressBar.progress("Downloading Daily.cvd")
    filedata = urllib2.urlopen('http://database.clamav.net/daily.cvd')
    datatowrite = filedata.read()
    with open(os.path.join(tempDir, "daily.cvd"), 'wb') as f:
        f.write(datatowrite)

    # Unpack each CVD with sigtool; it writes into the current directory,
    # hence the chdir to the temp directory first.
    progressBar.progress("Unpacking Main.cvd")
    os.chdir(tempDir)
    self.log(Level.INFO, "Running Command ==> " + self.pathToExe + " " + "--unpack" + " " + os.path.join(tempDir, "main.cvd"))
    pipe = Popen([self.pathToExe, "--unpack", os.path.join(tempDir, "main.cvd")], stdout=PIPE, stderr=PIPE)
    outText = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run ==> " + outText)

    progressBar.progress("Unpacking Daily.cvd")
    self.log(Level.INFO, "Running Command ==> " + self.pathToExe + " " + "--unpack" + " " + os.path.join(tempDir, "daily.cvd"))
    pipe = Popen([self.pathToExe, "--unpack", os.path.join(tempDir, "daily.cvd")], stdout=PIPE, stderr=PIPE)
    outText = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run ==> " + outText)

    # .hdb lines look like "md5:size:name" -- keep only the hash column.
    progressBar.progress("Creating Daily Hashset")
    with open (os.path.join(tempDir, "daily.hdb"), "r") as hashFile:
        with open (os.path.join(modDir, "ClamAV_Daily_Hashset_" + str(now.strftime("%Y-%m-%d")) + ".txt"), "w") as autopsyHash:
            for line in hashFile:
                hashLine = line.split(":")
                autopsyHash.write(hashLine[0] + "\n")

    progressBar.progress("Creating Main Hashset")
    with open (os.path.join(tempDir, "main.hdb"), "r") as hashFile:
        with open (os.path.join(modDir, "ClamAV_Main_Hashset_" + str(now.strftime("%Y-%m-%d")) + ".txt"), "w") as autopsyHash:
            for line in hashFile:
                hashLine = line.split(":")
                autopsyHash.write(hashLine[0] + "\n")

    # Clean up the temp directory and files (best effort).
    try:
        shutil.rmtree(tempDir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + tempDir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "ClamAV Hashsets", " ClamAV Hashsets have been created " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract the NTFS $UsnJrnl:$J journal, parse it with an external exe
    into a SQLite database, and turn every table row into a TSK_USNJ
    blackboard artifact with dynamically-created attributes.

    Returns IngestModule.ProcessResult.OK in all paths (errors are logged).
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "$UsnJrnl:$J", "$Extend")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create usnj working directory in the case temp directory; if it already
    # exists (prior run) just continue processing.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "usnj")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Usnj Directory already exists " + temp_dir)

    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        fileCount += 1

        # Extract the journal to a fixed name; NOTE(review): every file in
        # `files` overwrites the same usnj.txt, unlike other modules that
        # suffix the file id.
        lclDbPath = os.path.join(temp_dir, "usnj.txt")
        ContentUtils.writeToFile(file, File(lclDbPath))
        self.log(Level.INFO, "Saved File ==> " + lclDbPath)

        # Run the EXE, saving output to a sqlite database
        self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "\\usnj\\usnj.txt" + \
                 " " + Temp_Dir + "\\usnj.db3")
        pipe = Popen([self.path_to_exe, os.path.join(temp_dir, "usnj.txt"), os.path.join(temp_dir, "usnj.db3")], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Open the generated DB using JDBC
        lclDbPath = os.path.join(temp_dir, "usnj.db3")
        self.log(Level.INFO, "Path the system database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) usnj.db3 (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # List every table in the database; each is processed below.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER; ")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for system table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # addArtifactType raises if TSK_USNJ already exists (re-run); the
        # failure is expected and only logged.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_usnj = skCase.addArtifactType("TSK_USNJ", "NTFS UsrJrnl entries")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

        artID_usnj = skCase.getArtifactTypeID("TSK_USNJ")
        artID_usnj_evt = skCase.getArtifactType("TSK_USNJ")

        # Cycle through each table and create artifacts from its rows.
        while resultSet.next():
            try:
                table_name = resultSet.getString("tbl_name")
                SQL_String_1 = "Select * from " + table_name + ";"
                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"

                # Discover column names/types so attribute types can be
                # created to match (TEXT -> STRING, everything else -> LONG).
                Column_Names = []
                Column_Types = []
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type"))
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_USNJ_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_USNJ_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One artifact per row; one attribute per column.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_usnj)
                    Column_Number = 1
                    for col_name in Column_Names:
                        c_name = "TSK_USNJ_" + col_name
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseUsnJIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        else:
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseUsnJIngestModuleFactory.moduleName, resultSet3.getInt(Column_Number)))
                        Column_Number = Column_Number + 1
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from usnj table (" + e.getMessage() + ")")

        # Clean up
        stmt.close()
        dbConn.close()

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseUsnJIngestModuleFactory.moduleName, artID_usnj_evt, None))

        # Remove the per-file SQLite database.
        os.remove(lclDbPath)

    # Remove the extracted journal text and the working directory (best effort).
    try:
        os.remove(os.path.join(temp_dir, "usnj.txt"))
    except:
        self.log(Level.INFO, "removal of usnj.txt file failed " + temp_dir + "\\" + file.getName())
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of usnj directory failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Usnj Parser", " Usnj Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParseUsnJIngestModuleFactory.moduleName, artID_usnj_evt, None))

    return IngestModule.ProcessResult.OK
def shutDown(self): message = IngestMessage.createMessage( IngestMessage.MessageType.DATA, FindLastBootIngestModuleFactory.moduleName, str(self.filesFound) + " boot up records found") _ = IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Extract and analyze Atomic Wallet artifacts: Connection.log* files and
    history.json under the .atomic directory. Slack versions of the files are
    skipped. Returns IngestModule.ProcessResult.OK in all paths.
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the file manager
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    connectionFiles = fileManager.findFiles(dataSource, "Connection.log%", ".atomic")
    numFiles = len(connectionFiles)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Atomic Wallet directory in temp directory, if it exists then continue on processing
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Atomic_Wallet")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass

    # Extract and process each connection log (skip "-slack" files).
    for file in connectionFiles:
        if "-slack" not in file.getName():
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            fileCount += 1

            # Save the file locally; use file id in the name to reduce collisions.
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            self.processConnectionLogs(extractedFile, file)
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)
        else:
            # Slack file: make sure no stale extraction is left behind.
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)

    # Extract and process each history file (skip "-slack" files).
    historyFiles = fileManager.findFiles(dataSource, "history.json", ".atomic")
    numFiles = len(historyFiles)
    for file in historyFiles:
        if "-slack" not in file.getName():
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            fileCount += 1

            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            ContentUtils.writeToFile(file, File(extractedFile))
            self.processHistory(extractedFile, file)
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)
        else:
            extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
            try:
                os.remove(extractedFile)
            except:
                self.log(Level.INFO, "Failed to remove file " + extractedFile)

    try:
        shutil.rmtree(temporaryDirectory)
    except:
        self.log(Level.INFO, "removal of temporary directory failed " + temporaryDirectory)

    # Post a completion message to the ingest inbox.
    # BUG FIX: message previously said "Facebook Chat" -- a copy/paste error
    # from another module; this module analyzes Atomic Wallet data.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Atomic Wallet", " Atomic Wallet Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def shutDown(self): message = IngestMessage.createMessage( IngestMessage.MessageType.DATA, WiFiIngestModuleFactory.moduleName, str(self.filesFound) + " files found") _ = IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Extract the SAM and SYSTEM registry hives, parse BAM (Background
    Activity Moderator) records from SYSTEM, resolve RIDs to user names via
    SAM, and post one TSK_BAM_KEY artifact per record on the SYSTEM hive file.

    Returns IngestModule.ProcessResult.OK in all paths (errors are logged).
    """

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Hive files to extract
    filesToExtract = ("SAM", "SYSTEM")

    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # BUG FIX: the indexing call below referenced an undefined `blackboard`
    # (and an undefined `artChat`), so artifacts were never indexed -- the
    # bare except silently swallowed the NameError. Bind the blackboard here.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Create BAM directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "bam")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "bam Directory already exists " + temp_dir)

    # Setup variables to use to store information
    systemHiveFile = []
    userRids = {}
    bamRecord = []

    for fileName in filesToExtract:
        files = fileManager.findFiles(dataSource, fileName, "Windows/System32/Config")
        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            # Only take hives that live directly in the config directory.
            if file.getParentPath().upper() == '/WINDOWS/SYSTEM32/CONFIG/':
                # Save the hive locally in the temp folder.
                filePath = os.path.join(temp_dir, file.getName())
                ContentUtils.writeToFile(file, File(filePath))

                if file.getName() == 'SYSTEM':
                    # Keep the SYSTEM hive abstract file: artifacts attach to it.
                    systemHiveFile = file
                    bamRecord = self.processSYSTEMHive(filePath)
                elif file.getName() == 'SAM':
                    # Returns a dictionary keyed by RID with user-name values.
                    userRids = self.processSAMHive(filePath)

    # Create the custom artifact type; raises if it already exists (re-run),
    # which is expected and only logged.
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_ls = skCase.addArtifactType("TSK_BAM_KEY", "BAM Registry Key")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

    artifactName = "TSK_BAM_KEY"
    artId = skCase.getArtifactTypeID(artifactName)
    moduleName = BamKeyIngestModuleFactory.moduleName

    # Each bamRec is (rid, program name, timestamp); attributes used are
    # TSK_USER_NAME, TSK_PROG_NAME, TSK_DATETIME.
    for bamRec in bamRecord:
        attributes = ArrayList()
        art = systemHiveFile.newArtifact(artId)
        self.log(Level.INFO, "BamRec ==> " + str(bamRec))
        if bamRec[0] in userRids:
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(),
                    moduleName, userRids[bamRec[0]]))
        else:
            # No SAM match: fall back to the raw RID string.
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(),
                    moduleName, bamRec[0]))
        attributes.add(
            BlackboardAttribute(
                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(),
                moduleName, bamRec[1]))
        attributes.add(
            BlackboardAttribute(
                BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(),
                moduleName, int(bamRec[2])))
        art.addAttributes(attributes)

        # index the artifact for keyword search
        # BUG FIX: was blackboard.indexArtifact(artChat) with both names undefined.
        try:
            blackboard.indexArtifact(art)
        except:
            self._logger.log(
                Level.WARNING,
                "Error indexing artifact " + art.getDisplayName())

    # Clean up the extracted hives (best effort).
    try:
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + temp_dir)

    # After all hives, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "BamKey",
        " BamKey Files Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract Spotlight store.db files, convert them, and analyze the result.

    Writes every "store.db" file found under a "Store-V2" path to a temp
    directory, converts each with self.processSpotlightFile into a shared
    SQLite database in the module directory, then opens that database once
    and runs self.processSpotlightDb for every source file.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the store.db abstract file information
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "store.db", "Store-V2")
    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create spotlight directories in the module and temp directories;
    # if they already exist just continue on processing
    moduleDirectory = os.path.join(Case.getCurrentCase().getModuleDirectory(), "spotlight")
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "spotlight")
    try:
        os.mkdir(moduleDirectory)
    except:
        pass  # module directory already exists
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass  # temporary directory already exists

    # Write out each store.db file and convert it.
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Save the file locally; use the file id as a prefix to reduce collisions
        extractedFile = os.path.join(temporaryDirectory, str(file.getId()) + "-" + file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))
        self.processSpotlightFile(extractedFile, moduleDirectory)

    # Open the converted DB once using JDBC.
    # BUGFIX: the original re-opened a new connection to the same database on
    # every loop iteration and only closed the last one, leaking connections.
    lclDbPath = os.path.join(moduleDirectory, "spotlight_db.db3")
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        # Not a SQLite database (or driver failure) - nothing more we can do
        return IngestModule.ProcessResult.OK

    for file in files:
        self.processSpotlightDb(dbConn, file)

    # Clean up
    try:
        dbConn.close()
        shutil.rmtree(temporaryDirectory)
    except:
        self.log(Level.INFO, "removal of spotlight database failed " + temporaryDirectory)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Spotlight Parser", " Spotlight Db Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def throwWarning(self, msg):
    """Post *msg* to the ingest inbox as a DATA message from CookieModulez."""
    inboxMessage = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "CookieModulez", msg)
    IngestServices.getInstance().postMessage(inboxMessage)
def shutDown(self):
    """Post the per-thread file count to the ingest inbox at module shutdown."""
    # Send a message to the ingest inbox with the number of files found (in this thread).
    # BUGFIX: the original assigned postMessage's None return value to an
    # unused "ingestServices" local; the assignment served no purpose.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, ArtifactGroupFactory.moduleName,
        str(self.filesFound) + " files found")
    IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Parse staterepository-machine.srd and post TSK_INSTALLED_PROG artifacts.

    Extracts every "staterepository-machine%" file to a temp directory, opens
    staterepository-machine.srd as SQLite via JDBC, queries the installed Appx
    packages and writes one TSK_INSTALLED_PROG artifact (program name and
    install time) per row.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # get current case and the staterepository abstract file information
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # BUGFIX: "blackboard" was never defined in the original; the NameError was
    # silently swallowed by the bare except around indexArtifact, so artifacts
    # were never indexed for keyword search.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    files = fileManager.findFiles(dataSource, "staterepository-machine%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Create temp directory, if it exists then continue on processing
    temporaryDirectory = os.path.join(Case.getCurrentCase().getTempDirectory(), "Appx_Programs")
    try:
        os.mkdir(temporaryDirectory)
    except:
        pass  # temporary directory already exists

    # Write out every matching file first (the .srd database needs its sidecar files)
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        extractedFile = os.path.join(temporaryDirectory, file.getName())
        ContentUtils.writeToFile(file, File(extractedFile))

    for file in files:
        if file.getName().lower() != "staterepository-machine.srd":
            continue

        extractedFile = os.path.join(temporaryDirectory, file.getName())
        artIdInsProg = skCase.getArtifactTypeID("TSK_INSTALLED_PROG")
        moduleName = ProcessAppxProgramsIngestModuleFactory.moduleName

        # Open the extracted database using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extractedFile)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + extractedFile + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("select distinct * from (Select packfam.name, packfam.publisher, packfam.publisherid, packuser.user, " + \
                        " case Architecture when 0 then 'X64' when 9 then 'x86' when 11 then 'Neutral' else Architecture end Architecture, " + \
                        " pack.ResourceId, " + \
                        " substr(pack.packageFullName, instr(pack.packageFullName, '_') + 1, instr(substr(pack.packageFullName, instr(pack.packageFullName, '_') + 1), '_') - 1) version, " + \
                        " packfam.packageFamilyname, pack.packageFullName, '??' isFramework, '??' PackageUserInformaton, " + \
                        " '??' isResourcePakage, '??' IsBundle, '??' IsDevelopment, '??' Dependicies, '??' IsPartiallyStaged, " + \
                        " case SignatureOrigin when 3 then 'System' when 2 then 'Store' else 'Unknown' end SignatureKind, packuser.PackageStatus Status, " + \
                        " (substr(packuser.installTime,1,11) -11644473600) InstallTime, packloc.installedLocation " + \
                        " from PackageUser packuser, package pack, packageFamily packfam, packageLocation packloc " + \
                        " where packuser.package = pack._PackageId and pack.packageFamily = packfam._PackagefamilyId " + \
                        " and packloc.package = pack._packageId and (pack.resourceId is null or pack.resourceId = 'neutral')); ")
            self.log(Level.INFO, "query Appx tables")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for appx tables (" + e.getMessage() + ") ")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and get the installed programs and install time
        while resultSet.next():
            try:
                artInsProg = file.newArtifact(artIdInsProg)
                attributes = ArrayList()
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME,
                                                   moduleName, resultSet.getString("name")))
                attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(),
                                                   moduleName, resultSet.getInt("InstallTime")))
                artInsProg.addAttributes(attributes)
                # index the artifact for keyword search (best effort)
                try:
                    blackboard.indexArtifact(artInsProg)
                except:
                    pass
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from Appx tables (" + e.getMessage() + ")")

        # Close the database statement
        try:
            stmt.close()
            dbConn.close()
        except:
            pass

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Appx Installed Programs", " Appx Installed Programs Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract AD1 images and re-add the extracted content as a local-files data source.

    For every file at the data-source root whose local path ends in .ad1,
    runs the external extractor (self.path_to_exe), reads the SQLite catalog
    it produces, posts one AD1_EXTRACTOR artifact per extracted file, then
    registers the output directory as a new local-files data source.
    """
    self.log(Level.INFO, "Starting to process")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the modules output directory and create the sub directory
    modDir = os.path.join(Case.getCurrentCase().getModulesOutputDirAbsPath(), "AD1Extractor")
    try:
        os.mkdir(modDir)
    except:
        self.log(Level.INFO, "AD1 Extractor Directory already Exists " + modDir)

    moduleName = AD1ExtractorIngestModuleFactory.moduleName

    # get the current case
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # BUGFIX: "blackboard" was undefined in the original and the failure was
    # swallowed by a bare except, so artifacts were never posted/indexed.
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    files = fileManager.findFiles(dataSource, "%", "/")
    numFiles = len(files)
    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

    for file in files:
        self.log(Level.INFO, "File name to process is ==> " + file.getName())
        self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath()))
        imageFile = file.getLocalAbsPath()
        # BUGFIX: the original used "or" here, which let a non-directory file
        # with a None local path through and crashed on imageFile.lower().
        # Both conditions must hold.
        if (imageFile is not None) and (not file.isDir()):
            if ".ad1" in imageFile.lower():
                progressBar.progress("Extracting " + file.getName())
                filename, file_extension = os.path.splitext(file.getName())
                dbFile = os.path.join(modDir, filename + ".db3")
                self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + imageFile +
                         " " + modDir + " " + dbFile)
                pipe = Popen([self.path_to_exe, imageFile, modDir, dbFile], stdout=PIPE, stderr=PIPE)
                outText = pipe.communicate()[0]

                # Create the custom artifact type; if it already exists just log it.
                # The type ID is always looked up afterwards so artIdAD1 is an int
                # on both paths (the original left a Type object on the try path).
                try:
                    self.log(Level.INFO, "Begin Create New Artifacts")
                    skCase.addArtifactType("AD1_EXTRACTOR", "AD1 Extraction")
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, Artifact AD1_EXTRACTOR may exist. ==> ")
                artIdAD1 = skCase.getArtifactTypeID("AD1_EXTRACTOR")

                # Open the catalog database produced by the extractor
                try:
                    Class.forName("org.sqlite.JDBC").newInstance()
                    dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % dbFile)
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " +
                             dbFile + " (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                try:
                    stmt = dbConn.createStatement()
                    resultSet = stmt.executeQuery(self.sqlStatement)
                    self.log(Level.INFO, "query ad1_info")
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for ad1_info tables (" + e.getMessage() + ") ")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and record the extracted file metadata
                while resultSet.next():
                    try:
                        artAD1 = file.newArtifact(artIdAD1)
                        attributes = ArrayList()
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME,
                                                           moduleName, resultSet.getString("file_name")))
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEMP_DIR,
                                                           moduleName, resultSet.getString("ad1_path_name")))
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
                                                           moduleName, resultSet.getInt("date_created")))
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED,
                                                           moduleName, resultSet.getInt("date_modified")))
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
                                                           moduleName, resultSet.getInt("date_accessed")))
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HASH_MD5,
                                                           moduleName, resultSet.getString("md5_hash")))
                        attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HASH_SHA1,
                                                           moduleName, resultSet.getString("sha1_hash")))
                        artAD1.addAttributes(attributes)
                        # post/index the artifact for keyword search (best effort)
                        try:
                            blackboard.postArtifact(artAD1)
                        except:
                            pass
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from AD1tables (" + e.getMessage() + ")")

                # Close the database statement
                try:
                    stmt.close()
                    dbConn.close()
                except:
                    pass

    dir_list = [modDir]
    progress_updater = ProgressUpdater()
    newDataSources = []
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    skcase_data = Case.getCurrentCase()

    # Get a Unique device id using uuid
    device_id = UUID.randomUUID()
    self.log(Level.INFO, "device id: ==> " + str(device_id))
    skcase_data.notifyAddingDataSource(device_id)
    progressBar.progress("Adding Takeout files to AD1Extractor Data Source")

    # Add data source with files
    newDataSource = fileManager.addLocalFilesDataSource(str(device_id), "AD1", "", dir_list, progress_updater)
    newDataSources.append(newDataSource.getRootDirectory())

    # Get the files that were added
    files_added = progress_updater.getFiles()
    for file_added in files_added:
        progressBar.progress("Adding AD1 extracted files to new data source")
        skcase_data.notifyDataSourceAdded(file_added, device_id)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "AD1ExtractorSettings", " AD1Extractors Has Been Run ")
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse Windows File History Catalog1/Catalog2 ESE databases.

    Creates the custom TSK_FH_CATALOG_1/2 artifact types and the TSK_FH_*
    attribute types (looking them up if they already exist), extracts every
    "%edb" file under %/Windows/FileHistory/%, converts each to SQLite with
    the external self.path_to_exe helper, and posts one artifact per row of
    the resulting file_history table.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Check to see if the artifacts exist and if not then create it, also check to see if the attributes
    # exist and if not then create them
    skCase = Case.getCurrentCase().getSleuthkitCase();  # This will work in 4.0.1 and beyond
    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Custom artifact types: the except path runs when the type already exists.
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_cat1 = skCase.addArtifactType("TSK_FH_CATALOG_1", "File History Catalog 1")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, Catalog 1. ==> ")
        artID_cat1 = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_cat2 = skCase.addArtifactType("TSK_FH_CATALOG_2", "File History Catalog 2")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, Catalog 2. ==> ")
        artID_cat2 = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")

    # Create the attribute types; if one exists the error is caught and logged.
    try:
        attID_fh_pn = skCase.addArtifactAttributeType('TSK_FH_PATH', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Parent Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Prefetch Parent Path. ==> ")
    try:
        attID_fh_fn = skCase.addArtifactAttributeType('TSK_FH_FILE_NAME', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fh_fs = skCase.addArtifactAttributeType('TSK_FH_FILE_SIZE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")
    try:
        attID_fh_usn = skCase.addArtifactAttributeType('TSK_FH_USN_JOURNAL_ENTRY', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "USN Journal Entry")
    except:
        self.log(Level.INFO, "Attributes Creation Error, USN Journal Entry. ==> ")
    try:
        attID_fh_fc = skCase.addArtifactAttributeType('TSK_FH_FILE_CREATED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Created")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Created. ==> ")
    try:
        attID_fh_fm = skCase.addArtifactAttributeType('TSK_FH_FILE_MODIFIED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "File Modified")
    except:
        self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 3. ==> ")
    try:
        attID_fh_bq = skCase.addArtifactAttributeType('TSK_FH_BACKUP_QUEUED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Queued")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Queued ==> ")
    try:
        attID_fh_bc = skCase.addArtifactAttributeType('TSK_FH_BACKUP_CREATED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Created")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Created ==> ")
    try:
        attID_fh_bcp = skCase.addArtifactAttributeType('TSK_FH_BACKUP_CAPTURED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Captured")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Captured. ==> ")
    try:
        attID_fh_bu = skCase.addArtifactAttributeType('TSK_FH_BACKUP_UPDATED', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Updated")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Updated. ==> ")
    try:
        attID_fh_bv = skCase.addArtifactAttributeType('TSK_FH_BACKUP_VISIBLE', BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Backup Visible")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Backup Visible ==> ")

    self.log(Level.INFO, "Get Artifacts after they were created.")
    # Get the new artifacts and attributes that were just created
    #artID_wfh = skCase.getArtifactTypeID("TSK_PREFETCH")
    #artID_cat1 = skCase.getArtifactType("TSK_FH_CATALOG_1")
    #artID_cat2 = skCase.getArtifactType("TSK_FH_CATALOG_2")
    attID_fh_pn = skCase.getAttributeType("TSK_FH_PATH")
    attID_fh_fn = skCase.getAttributeType("TSK_FH_FILE_NAME")
    attID_fh_fs = skCase.getAttributeType("TSK_FH_FILE_SIZE")
    attID_fh_usn = skCase.getAttributeType("TSK_FH_USN_JOURNAL_ENTRY")
    attID_fh_fc = skCase.getAttributeType("TSK_FH_FILE_CREATED")
    attID_fh_fm = skCase.getAttributeType("TSK_FH_FILE_MODIFIED")
    attID_fh_bq = skCase.getAttributeType("TSK_FH_BACKUP_QUEUED")
    attID_fh_bc = skCase.getAttributeType("TSK_FH_BACKUP_CREATED")
    attID_fh_bcp = skCase.getAttributeType("TSK_FH_BACKUP_CAPTURED")
    attID_fh_bu = skCase.getAttributeType("TSK_FH_BACKUP_UPDATED")
    attID_fh_bv = skCase.getAttributeType("TSK_FH_BACKUP_VISIBLE")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Find the file history files from the users folders
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%edb", "%/Windows/FileHistory/%")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create file history directory in temp directory, if it exists then continue on processing
    Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "File_History")
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    try:
        os.mkdir(Temp_Dir)
    except:
        self.log(Level.INFO, "File_History Directory already exists " + Temp_Dir)

    # Write out each catalog esedb database to the temp directory
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(Temp_Dir, file.getName() + "_" + str(file.getId()))
        db_name = os.path.splitext(file.getName())[0]
        lclSQLPath = os.path.join(Temp_Dir, db_name + "_" + str(file.getId()) + ".db3")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Run the EXE, saving output to a sqlite database.  The non-Windows
        # branch passes the module directory as an extra argument.
        if PlatformUtil.isWindowsOS():
            self.log(Level.INFO, "Running program on data source parm 1 ==> " + self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([self.path_to_exe, lclDbPath, lclSQLPath], stdout=PIPE, stderr=PIPE)
        else:
            self.log(Level.INFO, "Running program on data source parm 1 ==> " + self.path_to_exe + " " + lclDbPath + " " + lclSQLPath)
            pipe = Popen([self.path_to_exe, lclDbPath, lclSQLPath, os.path.dirname(os.path.abspath(__file__))], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Pick the artifact type from the catalog file name (Catalog1 vs Catalog2)
        if db_name == "Catalog1":
            artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_1")
            artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_1")
        else:
            artID_fh = skCase.getArtifactTypeID("TSK_FH_CATALOG_2")
            artID_fh_evt = skCase.getArtifactType("TSK_FH_CATALOG_2")

        # assumes a parent path like /img/Users/<name>/... - index 2 is the
        # user folder name; TODO confirm against the data-source layout
        userpath = file.getParentPath()
        username = userpath.split('/')
        self.log(Level.INFO, "Getting Username " + username[2])

        # Open the converted DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclSQLPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + lclSQLPath + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the file_history table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = "Select ParentName 'TSK_FH_PATH', Childname 'TSK_FH_FILE_NAME', " + \
                            "Filesize 'TSK_FH_FILE_SIZE', " + \
                            "usn 'TSK_FH_USN_JOURNAL_ENTRY', " + \
                            "FileCreated 'TSK_FH_FILE_CREATED', filemodified 'TSK_FH_FILE_MODIFIED', " + \
                            "tqueued 'TSK_FH_BACKUP_QUEUED', tcreated 'TSK_FH_BACKUP_CREATED', " + \
                            "tcaptured 'TSK_FH_BACKUP_CAPTURED', tupdated 'TSK_FH_BACKUP_UPDATED', " + \
                            "tvisible 'TSK_FH_BACKUP_VISIBLE' from file_history"
            self.log(Level.INFO, "SQL Statement --> " + SQL_Statement)
            resultSet = stmt.executeQuery(SQL_Statement)
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for File_History table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                FH_Path = resultSet.getString("TSK_FH_PATH")
                FH_File_Name = resultSet.getString("TSK_FH_FILE_NAME")
                FH_Filesize = resultSet.getString("TSK_FH_FILE_SIZE")
                FH_Usn = resultSet.getString("TSK_FH_USN_JOURNAL_ENTRY")
                FH_FC = resultSet.getInt("TSK_FH_FILE_CREATED")
                FH_FM = resultSet.getInt("TSK_FH_FILE_MODIFIED")
                FH_BQ = resultSet.getInt("TSK_FH_BACKUP_QUEUED")
                FH_BC = resultSet.getInt("TSK_FH_BACKUP_CREATED")
                FH_BCP = resultSet.getInt("TSK_FH_BACKUP_CAPTURED")
                FH_BU = resultSet.getInt("TSK_FH_BACKUP_UPDATED")
                FH_BV = resultSet.getInt("TSK_FH_BACKUP_VISIBLE")
            except SQLException as e:
                # NOTE(review): on a read error the previous row's values (or
                # unbound names on the first row) are used below - kept as-is.
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

            # Make artifact for TSK_PREFETCH, this can happen when custom attributes are fully supported
            art = file.newArtifact(artID_fh)

            # Add the attributes to the artifact.
            art.addAttributes(((BlackboardAttribute(attID_fh_pn, ParseFileHistoryIngestModuleFactory.moduleName, FH_Path)), \
                               (BlackboardAttribute(attID_fh_fn, ParseFileHistoryIngestModuleFactory.moduleName, FH_File_Name)), \
                               (BlackboardAttribute(attID_fh_fs, ParseFileHistoryIngestModuleFactory.moduleName, FH_Filesize)), \
                               (BlackboardAttribute(attID_fh_usn, ParseFileHistoryIngestModuleFactory.moduleName, FH_Usn)), \
                               (BlackboardAttribute(attID_fh_fc, ParseFileHistoryIngestModuleFactory.moduleName, FH_FC)), \
                               (BlackboardAttribute(attID_fh_fm, ParseFileHistoryIngestModuleFactory.moduleName, FH_FM)), \
                               (BlackboardAttribute(attID_fh_bq, ParseFileHistoryIngestModuleFactory.moduleName, FH_BQ)), \
                               (BlackboardAttribute(attID_fh_bc, ParseFileHistoryIngestModuleFactory.moduleName, FH_BC)), \
                               (BlackboardAttribute(attID_fh_bcp, ParseFileHistoryIngestModuleFactory.moduleName, FH_BCP)), \
                               (BlackboardAttribute(attID_fh_bu, ParseFileHistoryIngestModuleFactory.moduleName, FH_BU)), \
                               (BlackboardAttribute(attID_fh_bv, ParseFileHistoryIngestModuleFactory.moduleName, FH_BV)), \
                               (BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), \
                                                    ParseFileHistoryIngestModuleFactory.moduleName, username[2]))))

            try:
                # index the artifact for keyword search
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event so the UI updates with the new artifacts for this catalog
        IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseFileHistoryIngestModuleFactory.moduleName, artID_fh_evt, None))

        # Clean up
        stmt.close()
        dbConn.close()
        #os.remove(lclDbPath)

    # Clean up file history directory and files
    try:
        shutil.rmtree(Temp_Dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Windows File History Parser", " Windows File History Has Been Parsed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Flag every *.jpg file and compute its perceptual hash.

    Creates a TSK_INTERESTING_FILE_HIT ("Picture Files" set) artifact for
    each jpg, copies the file to a temp directory, computes its pHash with
    the bundled PHash library and, if self.pHashToCheck is set, logs whether
    the image is within Hamming distance 20 of that reference hash.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find files named *.jpg, regardless of parent path
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.jpg")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Make an artifact on the blackboard.  TSK_INTERESTING_FILE_HIT is a
        # generic type of artifact.  Refer to the developer docs for other examples.
        art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
        att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
                                  PerceptualHashIngestModuleFactory.moduleName, "Picture Files")
        art.addAttribute(att)

        try:
            # index the artifact for keyword search
            blackboard.indexArtifact(art)
        except Blackboard.BlackboardException as e:
            self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there is a new artifact
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(PerceptualHashIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))

        # Create the Pictures temp sub-directory (re-attempted per file; the
        # except path runs on every iteration after the first).
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "Create Directory " + Temp_Dir)
        try:
            temp_dir = os.path.join(Temp_Dir, "Pictures")
            os.mkdir(temp_dir)
        except:
            self.log(Level.INFO, "Pictures Directory already exists " + temp_dir)

        # Save the file locally in the temp folder.
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

        # This code will use the phash library to calculate perceptual hash value and difference.
        phash = PHash()
        path_img = os.path.join(temp_dir, file.getName())
        bit_phash = phash.getHash(path_img)
        hex_phash = PHash.binaryString2hexString(bit_phash)
        self.log(Level.INFO, file.getName() + ":Path ==> " + path_img + " ")
        #self.log(Level.INFO, file.getName() + ":pHash(bit) ==> " + bit_phash + " ")
        self.log(Level.INFO, file.getName() + ":PHash ==> " + hex_phash + " ")

        # Compare against the user-supplied reference hash, if one was configured
        if (self.pHashToCheck != ""):
            self.log(Level.INFO, "pHashToCheck ==> " + self.pHashToCheck + " ")
            differ_phash = PHash.distance(PHash.hexString2binaryString(self.pHashToCheck), bit_phash)
            self.log(Level.INFO, file.getName() + ":Difference ==> " + str(differ_phash) + " ")
            # Hamming distance below 20 is treated as "similar" by this module
            if (differ_phash < 20):
                self.log(Level.INFO, file.getName() + ":Similar? ==> True ")
            else:
                self.log(Level.INFO, file.getName() + ":Similar? ==> False ")

    # After all files, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "PerceptualHash Analyzer", "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract all *.eml files, parse them, then post the results per file.

    Non-slack eml files are copied to a temp directory (file id prefixed to
    avoid name collisions), parsed in bulk by emlParser, and each parsed
    message is handed to self.processEmails.
    """
    # Work amount is unknown up front.
    progressBar.switchToIndeterminate()

    currentCase = Case.getCurrentCase()
    skCase = currentCase.getSleuthkitCase()
    fileManager = currentCase.getServices().getFileManager()

    emlFiles = fileManager.findFiles(dataSource, "%.eml")
    numFiles = len(emlFiles)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0

    # Working directories under the case module and temp areas; ignore
    # failures when they already exist.
    moduleDirectory = os.path.join(currentCase.getModuleDirectory(), "Email-Eml")
    temporaryDirectory = os.path.join(currentCase.getTempDirectory(), "Email-Eml")
    for directory in (moduleDirectory, temporaryDirectory):
        try:
            os.mkdir(directory)
        except:
            pass

    # Copy every non-slack eml out of the image.
    for emlFile in emlFiles:
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        try:
            if not emlFile.getName().endswith('-slack'):
                extractedFile = os.path.join(
                    temporaryDirectory, str(emlFile.getId()) + "-" + emlFile.getName())
                ContentUtils.writeToFile(emlFile, File(extractedFile))
        except:
            self.log(Level.INFO, "Error writing File " +
                     os.path.join(temporaryDirectory, emlFile.getName()))

    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Parse everything that was extracted in one pass.
    eml = emlParser(temporaryDirectory, moduleDirectory)
    eml.processEmls()
    emlList = eml.getEmlList()

    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Hand each parsed message back for artifact creation.
    for emlFile in emlFiles:
        if emlFile.getName().endswith('-slack'):
            continue
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        parsedKey = str(emlFile.getId()) + "-" + emlFile.getName()
        self.processEmails(skCase, emlFile, emlList[parsedKey], dataSource)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Mac Mail Processor", " Mac Mail Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Submit every file tagged with one of self.tag_list to a Cuckoo server.

    For each configured tag, queries the case database for the tagged files,
    writes each one to <case temp>/Cuckoo, submits it with the external
    cuckoo client executable, posts a per-file status message to the ingest
    inbox and deletes the local copy afterwards.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    Files_submitted = 0

    # BUGFIX: the original created Temp_Dir + "\Cuckoo" but wrote files into
    # Temp_Dir + "\cuckoo" - two different directories on a case-sensitive
    # filesystem, and backslash concatenation is not portable.  Build one
    # path with os.path.join and use it for both operations.
    cuckoo_dir = os.path.join(Temp_Dir, "Cuckoo")
    try:
        os.mkdir(cuckoo_dir)
    except:
        self.log(Level.INFO, "Cuckoo Directory already exists " + Temp_Dir)

    for tag_name in self.tag_list:
        self.log(Level.INFO, "Processing Tag ==> " + tag_name)
        # NOTE(review): tag_name is interpolated directly into the SQL; tag
        # display names are user-created, so a quote in a tag breaks/abuses
        # this query.  skCase.executeQuery offers no parameter binding here,
        # so the original query form is kept - sanitize tags upstream.
        sql_statement = "select name, parent_path from tsk_files a, tag_names c, content_tags d " + \
                        " where d.tag_name_id = c.tag_name_id and c.display_name = '" + tag_name + "' and d.obj_id = a.obj_id;"
        self.log(Level.INFO, "SQL Statement ==> " + sql_statement)
        skCase = Case.getCurrentCase().getSleuthkitCase()
        dbquery = skCase.executeQuery(sql_statement)
        resultSet = dbquery.getResultSet()
        while resultSet.next():
            fileManager = Case.getCurrentCase().getServices().getFileManager()
            files = fileManager.findFiles(dataSource, resultSet.getString("name"),
                                          resultSet.getString("parent_path"))
            for file in files:
                # Check if the user pressed cancel while we were busy
                if self.context.isJobCancelled():
                    return IngestModule.ProcessResult.OK

                # Save the file locally in the temp folder
                FilePath = os.path.join(cuckoo_dir, file.getName())
                ContentUtils.writeToFile(file, File(FilePath))

                # Call the Cuckoo API to submit the file
                pipe = Popen([self.path_to_cuckoo_exe, self.Protocol, self.IP_Address,
                              self.Port_Number, "submit_file", FilePath],
                             stdout=PIPE, stderr=PIPE)
                out_text = pipe.communicate()[0]
                self.log(Level.INFO, resultSet.getString("parent_path") + "\\" +
                         resultSet.getString("name") + "<== Status of File Submit is " + out_text + " ==>")
                Files_submitted = Files_submitted + 1

                # Post the per-file submit status to the ingest inbox
                message = IngestMessage.createMessage(
                    IngestMessage.MessageType.DATA, "Cuckoo File Submit",
                    resultSet.getString("parent_path") + "/" + resultSet.getString("name") + " " + out_text)
                IngestServices.getInstance().postMessage(message)

                # Delete the local copy that was written
                try:
                    os.remove(FilePath)
                except:
                    self.log(Level.INFO, "removal of " + FilePath + " Failed ")
        dbquery.close()

    # Summary message once every tag has been processed
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "Cuckoo File Submit",
        str(Files_submitted) + " files have been submitted to cuckoo")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Drive the Mac OS 'recents' artifact extraction for one data source.

    Determines the OS version of the data source (via parse_plist_data,
    which sets self.os_version), trims the version to major.minor, then
    walks the artifact definitions in the bundled Macos_recents.db3
    settings database and dispatches each one to the plist or sqlite
    parser.

    Always returns IngestModule.ProcessResult.OK; errors are logged.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    self.log(Level.INFO, "Starting 2 to process, Just before call to parse_safari_history")

    # The settings database ships alongside this module file.
    # BUG FIX: build the path with os.path.join instead of a hard-coded
    # "\\" separator so the module also works on non-Windows hosts.
    head, tail = os.path.split(os.path.abspath(__file__))
    settings_db = os.path.join(head, "Macos_recents.db3")

    # Run this first to get the version of the OS to pass to the rest of the program.
    self.parse_plist_data(dataSource, progressBar, 'All', 1, settings_db)
    self.log(Level.INFO, "MacOS Version is ===> " + self.os_version + " < == ")

    # Trim e.g. "10.12.6" to "10.12" - artifact definitions are keyed by
    # major.minor only.  (Replaces the original character-counting loop.)
    if self.os_version.count('.') > 1:
        self.os_version = '.'.join(self.os_version.split('.')[:2])

    # Open the settings database through the SQLite JDBC driver.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) macos_recents.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Query the artifact table for everything defined for this OS version.
    try:
        stmt = dbConn.createStatement()
        # BUG FIX: the original hard-coded b.os_version = '10.12', which made
        # the version detection above dead code; use the detected version.
        process_data_sql = "select mac_osx_art_id, mac_osx_art_type, os_version from mac_artifact a, os_version b " + \
                           " where a.os_id = b.os_id and b.os_version = '" + self.os_version + "' and mac_osx_art_id > 1;"
        self.log(Level.INFO, process_data_sql)
        resultSet = stmt.executeQuery(process_data_sql)
        self.log(Level.INFO, "query mac_artifact table")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for mac_artifact (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Process all the artifacts based on version of the OS.
    while resultSet.next():
        if resultSet.getString("mac_osx_art_type") == "Plist":
            self.parse_plist_data(dataSource, progressBar, resultSet.getString("os_version"),
                                  resultSet.getString("mac_osx_art_id"), settings_db)
        else:
            self.parse_sqlite_data(dataSource, progressBar, resultSet.getString("os_version"),
                                   resultSet.getString("mac_osx_art_id"), settings_db)

    self.log(Level.INFO, "MacOS Version is ===> " + self.os_version + " < == ")
    self.log(Level.INFO, "ending process, Just before call to parse_safari_history")

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Mac OS Recent Artifacts", " Mac OS Recents Artifacts Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract hiberfil.sys from the data source, convert it to a raw
    memory image with Volatility's imagecopy plugin, and add the result
    to the case as a new local-files data source.

    Only runs when self.hiber_flag is set.  self.Python_Program selects
    whether Volatility is a .py script (run through an interpreter) or a
    standalone executable.

    Always returns IngestModule.ProcessResult.OK; errors are logged.
    """
    self.log(Level.INFO, "Starting to process Hiberfil.sys and Crash Dumps")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    if self.hiber_flag:
        # Create <ModulesOutput>/Volatility/Memory-Image-hiberfil.
        Mod_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
        ModOut_Dir = os.path.join(Mod_Dir, "Volatility", "Memory-Image-hiberfil")
        self.log(Level.INFO, "Module Output Directory ===> " + ModOut_Dir)
        try:
            os.mkdir(os.path.join(Mod_Dir, "Volatility"))
            os.mkdir(ModOut_Dir)
        except OSError:
            # Directories left over from a previous run - keep going.
            self.log(Level.INFO, "***** Error Module Output Directory already exists " + ModOut_Dir)

        # Find hiberfil.sys in the root of the data source.
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "hiberfil.sys", "/")
        numFiles = len(files)
        self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))

        for file in files:
            self.log(Level.INFO, "File to process is ==> " + str(file))
            self.log(Level.INFO, "File name to process is ==> " + file.getName())

            # Extract hiberfil.sys into the case temp directory.
            tmp_Dir = Case.getCurrentCase().getTempDirectory()
            Hiber_File = os.path.join(tmp_Dir, file.getName())
            ContentUtils.writeToFile(file, File(Hiber_File))
            self.log(Level.INFO, "File name to process is ==> " + Hiber_File)

            # Convert the hibernation file to a raw memory image.
            dump_file = os.path.join(ModOut_Dir, "Memory-Image-from-hiberfil.img")
            if self.Python_Program:
                # Volatility is a .py script: run it through the interpreter.
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " +
                         Hiber_File + " " + " -O " + dump_file)
                if PlatformUtil.isWindowsOS():
                    pipe = Popen(["Python.exe", self.Volatility_Executable, "imagecopy", "-f", Hiber_File,
                                  "-O" + dump_file], stdout=PIPE, stderr=PIPE)
                else:
                    pipe = Popen(["python", self.Volatility_Executable, "imagecopy", "-f", Hiber_File,
                                  "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            else:
                # Standalone Volatility executable.
                self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " imagecopy -f " +
                         Hiber_File + " " + " -O " + dump_file)
                pipe = Popen([self.Volatility_Executable, "imagecopy", "-f", Hiber_File,
                              "-O" + dump_file], stdout=PIPE, stderr=PIPE)
            out_text = pipe.communicate()[0]
            self.log(Level.INFO, "Output from run is ==> " + out_text)

            # Add the converted memory image back to the case as a new
            # local-files data source so it can be ingested like any other.
            progress_updater = ProgressUpdater()
            dir_list = [dump_file]
            fileManager_2 = Case.getCurrentCase().getServices().getFileManager()
            skcase_data = Case.getCurrentCase()

            # Get a unique device id using uuid.
            device_id = UUID.randomUUID()
            self.log(Level.INFO, "device id: ==> " + str(device_id))
            skcase_data.notifyAddingDataSource(device_id)

            # Add data source with files.
            newDataSource = fileManager_2.addLocalFilesDataSource(str(device_id), "Hiberfile Memory Image", "",
                                                                  dir_list, progress_updater)

            # Notify the UI about every file that was added.
            files_added = progress_updater.getFiles()
            for file_added in files_added:
                skcase_data.notifyDataSourceAdded(file_added, device_id)
                self.log(Level.INFO, "Fire Module1: ==> " + str(file_added))

    # Post a completion message to the ingest inbox.
    # BUG FIX: corrected typo "fro" -> "from" in the user-visible message.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "HiberFil_Crash", " Hiberfil/Crash Dumps have been extracted from Image. ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse the MacOS .fseventsd journal files for one data source.

    Extracts every file under any .fseventsd directory into a temp
    folder, runs the external FSEvents parser executable over that
    folder to produce a SQLite database of parsed records, then turns
    those records into blackboard artifacts using the SQL/artifact
    definitions shipped in fsevents_sql.db3 next to this module file.

    Always returns IngestModule.ProcessResult.OK; failures are logged.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Get the temp directory and create the sub directory.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "MacFSEvents")
    try:
        os.mkdir(temp_dir)
    except:
        # Left over from a previous run; processing continues.
        self.log(Level.INFO, "FSEvents Directory already exists " + temp_dir)

    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # All files inside any ".fseventsd" directory on the data source.
    files = fileManager.findFiles(dataSource, "%", ".fseventsd")
    numFiles = len(files)

    # Extract each journal file locally so the external parser can read
    # it.  Directory entries and the uuid marker file carry no event
    # records and are skipped.
    for file in files:
        if (file.getName() == "..") or (file.getName() == '.') or (file.getName() == 'fseventsd-uuid'):
            pass
        else:
            # Check if the user pressed cancel while we were busy.
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Save the file locally in the temp folder.
            filePath = os.path.join(temp_dir, file.getName())
            ContentUtils.writeToFile(file, File(filePath))

    self.log(Level.INFO, "Number of files to process ==> " + str(numFiles))
    self.log(Level.INFO, "Running program ==> " + self.MacFSEvents_Executable + " -c Autopsy " + "-o " + temp_dir + \
             " -s " + Temp_Dir + "\MacFSEvents")
    # Run the FSEvents parser once over the whole extracted folder; it
    # writes Autopsy_FSEvents-Parsed_Records_DB.sqlite into temp_dir.
    pipe = Popen([self.MacFSEvents_Executable, "-c", "Autopsy", "-o", temp_dir, "-s", temp_dir],
                 stdout=PIPE, stderr=PIPE)
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)

    database_file = os.path.join(temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite")

    # Open the settings database (ships next to this module) that maps
    # artifact names/titles to the SQL used to extract their records.
    try:
        head, tail = os.path.split(os.path.abspath(__file__))
        settings_db = os.path.join(head, "fsevents_sql.db3")
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn1 = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    # Create each custom artifact type; creation raises when the type
    # already exists from an earlier run, which is logged and ignored.
    try:
        stmt1 = dbConn1.createStatement()
        sql_statement1 = "select distinct artifact_name, artifact_title from extracted_content_sql;"
        resultSet1 = stmt1.executeQuery(sql_statement1)
        while resultSet1.next():
            try:
                self.log(Level.INFO, "Begin Create New Artifacts")
                artID_fse = skCase.addArtifactType(resultSet1.getString("artifact_name"), resultSet1.getString("artifact_title"))
            except:
                self.log(Level.INFO, "Artifacts Creation Error, " + resultSet1.getString("artifact_name") + " some artifacts may not exist now. ==> ")
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        #return IngestModule.ProcessResult.OK

    # Create the attribute types, if one exists then catch the error.
    try:
        attID_fse_fn = skCase.addArtifactAttributeType("TSK_FSEVENTS_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Name. ==> ")
    try:
        attID_fse_msk = skCase.addArtifactAttributeType("TSK_FSEVENTS_FILE_MASK", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Mask")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_src = skCase.addArtifactAttributeType("TSK_FSEVENTS_SOURCE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source File")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")
    try:
        attID_fse_dte = skCase.addArtifactAttributeType("TSK_FSEVENTS_DATES", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Date(s)")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Mask. ==> ")

    # Open the records database produced by the parser run above.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % os.path.join(temp_dir, "Autopsy_FSEvents-Parsed_Records_DB.sqlite"))
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    #artID_fse = skCase.getArtifactTypeID("TSK_MACOS_FSEVENTS")
    #artID_fse_evt = skCase.getArtifactType("TSK_MACOS_FSEVENTS")
    # Look the artifact/attribute types back up so valid ids are in hand
    # whether they were created above or already existed.
    artID_fse = skCase.getArtifactTypeID("TSK_MACOS_ALL_FSEVENTS")
    artID_fse_evt = skCase.getArtifactType("TSK_MACOS_ALL_FSEVENTS")
    attID_fse_fn = skCase.getAttributeType("TSK_FSEVENTS_FILE_NAME")
    attID_fse_msk = skCase.getAttributeType("TSK_FSEVENTS_FILE_MASK")
    attID_fse_src = skCase.getAttributeType("TSK_FSEVENTS_SOURCE")
    attID_fse_dte = skCase.getAttributeType("TSK_FSEVENTS_DATES")

    # For every extracted journal file, run each definition's SQL
    # restricted to records sourced from that file, and post one
    # artifact per returned row.
    for file in files:
        if ('slack' in file.getName()):
            # Skip file-slack pseudo files.
            pass
        elif (file.getName() == '..') or (file.getName() == '.'):
            pass
        else:
            stmt1 = dbConn1.createStatement()
            sql_statement1 = "select sql_statement, artifact_name, artifact_title from extracted_content_sql;"
            resultSet1 = stmt1.executeQuery(sql_statement1)
            while resultSet1.next():
                try:
                    artID_fse = skCase.getArtifactTypeID(resultSet1.getString("artifact_name"))
                    artID_fse_evt = skCase.getArtifactType(resultSet1.getString("artifact_name"))
                    try:
                        stmt = dbConn.createStatement()
                        # NOTE(review): the file name is concatenated into
                        # the SQL; names come from the image, not typed input.
                        sql_statement = resultSet1.getString("sql_statement") + " and source like '%" + file.getName() + "';"
                        resultSet = stmt.executeQuery(sql_statement)
                        # Cycle through each row and create an artifact.
                        while resultSet.next():
                            art = file.newArtifact(artID_fse)
                            art.addAttributes(((BlackboardAttribute(attID_fse_fn, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("filename"))), \
                                               (BlackboardAttribute(attID_fse_msk, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("mask"))), \
                                               (BlackboardAttribute(attID_fse_src, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("source"))), \
                                               (BlackboardAttribute(attID_fse_dte, MacFSEventsIngestModuleFactory.moduleName, resultSet.getString("OTHER_DATES")))))
                    except SQLException as e:
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
                        return IngestModule.ProcessResult.OK
                except SQLException as e:
                    self.log(Level.INFO, "Could not open database file (not SQLite) " + database_file + " (" + e.getMessage() + ")")
            try:
                stmt.close()
            except:
                self.log(Level.INFO, "Error closing statement for " + file.getName())

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(MacFSEventsIngestModuleFactory.moduleName, artID_fse_evt, None))

    # Close both databases and remove the temp working folder.
    try:
        stmt.close()
        dbConn.close()
        stmt1.close()
        dbConn1.close()
        #os.remove(Temp_Dir + "Autopsy_FSEvents-EXCEPTIONS_LOG.txt")
        #os.remove(Temp_Dir + "Autopsy_FSEvents-Parsed_Records.tsv")
        #os.remove(Temp_Dir + "Autopsy_FSEvents-Parsed_Records_DB.sqlite")
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of MacFSEvents imageinfo database failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "MacFSEventsSettings", " MacFSEventsSettings Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse Windows Jump List *.automaticDestinations-ms files.

    Creates (or looks up) the TSK_JL_AD artifact type plus its custom
    attribute types, extracts every matching file to a temp folder, runs
    the external parser exe to build JL_AD.db3, then queries that
    database per source file and posts one artifact per parsed entry.

    Always returns IngestModule.ProcessResult.OK; failures are logged.
    """
    # Check to see if the artifact exists and if not then create it; also
    # check the attributes and create them as needed.  addArtifactType /
    # addArtifactAttributeType raise when the type already exists, which
    # is logged and ignored.
    skCase = Case.getCurrentCase().getSleuthkitCase();
    skCase_Tran = skCase.beginTransaction()
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_jl_ad = skCase.addArtifactType("TSK_JL_AD", "Jump List Auto Dest")
    except:
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD")
    try:
        attID_jl_fn = skCase.addArtifactAttributeType("TSK_JLAD_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "JumpList File Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, JL AD File Name. ==> ")
    try:
        attID_jl_fg = skCase.addArtifactAttributeType("TSK_JLAD_FILE_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Description")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Description. ==> ")
    try:
        attID_jl_in = skCase.addArtifactAttributeType("TSK_JLAD_ITEM_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Item Name")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Item Name. ==> ")
    try:
        attID_jl_cl = skCase.addArtifactAttributeType("TSK_JLAD_COMMAND_LINE_ARGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Command Line Args")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Command Line Arguments. ==> ")
    # NOTE(review): this type name contains a space ("TSK_JLAD_Drive Type");
    # it is looked up with the same spelling below, so it works, but it
    # breaks the naming convention of every other type here.
    try:
        attID_jl_dt = skCase.addArtifactAttributeType("TSK_JLAD_Drive Type", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Type")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Drive Type. ==> ")
    try:
        attID_jl_dsn = skCase.addArtifactAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Drive Serial Number")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Drive Serial Number. ==> ")
    try:
        attID_jl_des = skCase.addArtifactAttributeType("TSK_JLAD_DESCRIPTION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Description")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Description. ==> ")
    try:
        attID_jl_evl = skCase.addArtifactAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Env Var Location")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Env Var Location. ==> ")
    try:
        attID_jl_fat = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ACCESS_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Access Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Access Time. ==> ")
    try:
        attID_jl_faf = skCase.addArtifactAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Attribute Flags")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Attribute Flags. ==> ")
    try:
        attID_jl_fct = skCase.addArtifactAttributeType("TSK_JLAD_FILE_CREATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Creation Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Creation Time. ==> ")
    try:
        attID_jl_fmt = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "File Modification Time")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ")
    try:
        attID_jl_fs = skCase.addArtifactAttributeType("TSK_JLAD_FILE_SIZE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Size. ==> ")
    try:
        attID_jl_ic = skCase.addArtifactAttributeType("TSK_JLAD_ICON_LOCATION", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Icon Location")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Icon Location. ==> ")
    try:
        attID_jl_ltid = skCase.addArtifactAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Link Target Identifier Data")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Link Target Identifier Data. ==> ")
    # NOTE(review): the log text below says "File Modification Time" but this
    # is the Local Path attribute - copy/paste slip in the message only.
    try:
        attID_jl_lp = skCase.addArtifactAttributeType("TSK_JLAD_LOCAL_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Local Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, File Modification Time. ==> ")
    try:
        attID_jl_mi = skCase.addArtifactAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Machine Identifier")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Machine Identifier. ==> ")
    try:
        attID_jl_np = skCase.addArtifactAttributeType("TSK_JLAD_NETWORK_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Network Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Network Path. ==> ")
    try:
        attID_jl_rp = skCase.addArtifactAttributeType("TSK_JLAD_RELATIVE_PATH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Relative Path")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Relative Path. ==> ")
    try:
        attID_jl_vl = skCase.addArtifactAttributeType("TSK_JLAD_VOLUME_LABEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Volume Label")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Volume Label. ==> ")
    # NOTE(review): assigned to attID_jl_wc here but re-fetched below as
    # attID_jl_wd, which is the name actually used when posting artifacts.
    try:
        attID_jl_wc = skCase.addArtifactAttributeType("TSK_JLAD_WORKING_DIRECTORY", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Working Directory")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Working Directory. ==> ")

    #self.log(Level.INFO, "Get Artifacts after they were created.")
    # Get the new artifacts and attributes that were just created (or
    # that already existed from a previous run).
    artID_jl_ad = skCase.getArtifactTypeID("TSK_JL_AD")
    artID_jl_ad_evt = skCase.getArtifactType("TSK_JL_AD")
    attID_jl_fn = skCase.getAttributeType("TSK_JLAD_FILE_NAME")
    attID_jl_fg = skCase.getAttributeType("TSK_JLAD_FILE_DESCRIPTION")
    attID_jl_in = skCase.getAttributeType("TSK_JLAD_ITEM_NAME")
    attID_jl_cl = skCase.getAttributeType("TSK_JLAD_COMMAND_LINE_ARGS")
    attID_jl_dt = skCase.getAttributeType("TSK_JLAD_Drive Type")
    attID_jl_dsn = skCase.getAttributeType("TSK_JLAD_DRIVE_SERIAL_NUMBER")
    attID_jl_des = skCase.getAttributeType("TSK_JLAD_DESCRIPTION")
    attID_jl_evl = skCase.getAttributeType("TSK_JLAD_ENVIRONMENT_VARIABLES_LOCATION")
    attID_jl_fat = skCase.getAttributeType("TSK_JLAD_FILE_ACCESS_TIME")
    attID_jl_faf = skCase.getAttributeType("TSK_JLAD_FILE_ATTRIBUTE_FLAGS")
    attID_jl_fct = skCase.getAttributeType("TSK_JLAD_FILE_CREATION_TIME")
    attID_jl_fmt = skCase.getAttributeType("TSK_JLAD_FILE_MODIFICATION_TIME")
    attID_jl_fs = skCase.getAttributeType("TSK_JLAD_FILE_SIZE")
    attID_jl_ic = skCase.getAttributeType("TSK_JLAD_ICON_LOCATION")
    attID_jl_ltid = skCase.getAttributeType("TSK_JLAD_LINK_TARGET_IDENTIFIER_DATA")
    attID_jl_lp = skCase.getAttributeType("TSK_JLAD_LOCAL_PATH")
    attID_jl_mi = skCase.getAttributeType("TSK_JLAD_FILE_MACHINE_IDENTIFIER")
    attID_jl_np = skCase.getAttributeType("TSK_JLAD_NETWORK_PATH")
    attID_jl_rp = skCase.getAttributeType("TSK_JLAD_RELATIVE_PATH")
    attID_jl_vl = skCase.getAttributeType("TSK_JLAD_VOLUME_LABEL")
    attID_jl_wd = skCase.getAttributeType("TSK_JLAD_WORKING_DIRECTORY")

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Find the Jump List automatic-destinations files.
    files = []
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create the JL_AD working directory in the case temp directory; if
    # it exists then continue on processing.
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "JL_AD")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "JL_AD Directory already exists " + temp_dir)

    # Write out each Jump List file to the temp directory.
    for file in files:
        # Check if the user pressed cancel while we were busy.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        fileCount += 1
        # Save the file locally in the temp folder.
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))

    # Run the EXE once over the whole folder, saving output to a sqlite
    # database (JL_AD.db3) in the case temp directory.
    self.log(Level.INFO, "Running program on data source parm 1 ==> " + temp_dir + " Parm 2 ==> " + Temp_Dir + "\JL_AD.db3")
    output = subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "JL_AD.db3"), self.path_to_app_id_db], stdout=subprocess.PIPE).communicate()[0]
    self.log(Level.INFO, " Return code is ==> " + output)

    # Set the database to be read to the one created by the parser.
    lclDbPath = os.path.join(Temp_Dir, "JL_AD.db3")
    self.log(Level.INFO, "Path to the JL_AD database file created ==> " + lclDbPath)

    # Open the DB using JDBC.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK

    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "%.automaticDestinations-ms")

    # For each source file, pull its parsed rows out of the database and
    # post one TSK_JL_AD artifact per row.
    for file in files:
        file_name = os.path.splitext(file.getName())[0]
        self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====")
        # Query the parsed-records table in the database, all columns.
        # NOTE(review): file_name is concatenated into the SQL; names come
        # from the image, not typed input, but a quote would break the query.
        try:
            stmt = dbConn.createStatement()
            SQL_Statement = "select File_Name, File_Description, Item_Name, command_line_arguments, drive_type, drive_serial_number, " + \
                            " description, environment_variables_location, file_access_time, file_attribute_flags, file_creation_time, " + \
                            " file_modification_time, file_size, icon_location, link_target_identifier_data, local_path, " + \
                            " machine_identifier, network_path, relative_path, volume_label, working_directory " + \
                            " from Automatic_destinations_JL where upper(File_Name) = upper('" + file_name + "');"
            #self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====")
            resultSet = stmt.executeQuery(SQL_Statement)
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts.
        while resultSet.next():
            try:
                File_Name = resultSet.getString("File_Name")
                File_Description = resultSet.getString("File_Description")
                Item_Name = resultSet.getString("Item_Name")
                Command_Line_Arguments = resultSet.getString("command_line_arguments")
                Drive_Type = resultSet.getInt("drive_type")
                Drive_Serial_Number = resultSet.getInt("drive_serial_number")
                Description = resultSet.getString("description")
                Environment_Variables_Location = resultSet.getString("environment_variables_location")
                File_Access_Time = resultSet.getString("file_access_time")
                File_Attribute_Flags = resultSet.getInt("file_attribute_flags")
                File_Creation_Time = resultSet.getString("file_creation_time")
                File_Modification_Time = resultSet.getString("file_modification_time")
                File_Size = resultSet.getInt("file_size")
                Icon_Location = resultSet.getString("icon_location")
                Link_Target_Identifier_Data = resultSet.getString("link_target_identifier_data")
                Local_Path = resultSet.getString("local_path")
                Machine_Identifier = resultSet.getString("machine_identifier")
                Network_Path = resultSet.getString("network_path")
                Relative_Path = resultSet.getString("relative_path")
                Volume_Label = resultSet.getString("volume_label")
                Working_Directory = resultSet.getString("working_directory")
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

            # Make an artifact on the blackboard and give it one attribute
            # for each parsed field.
            art = file.newArtifact(artID_jl_ad)
            art.addAttributes(((BlackboardAttribute(attID_jl_fn, JumpListADDbIngestModuleFactory.moduleName, File_Name)), \
                               (BlackboardAttribute(attID_jl_fg, JumpListADDbIngestModuleFactory.moduleName, File_Description)), \
                               (BlackboardAttribute(attID_jl_in, JumpListADDbIngestModuleFactory.moduleName, Item_Name)), \
                               (BlackboardAttribute(attID_jl_cl, JumpListADDbIngestModuleFactory.moduleName, Command_Line_Arguments)), \
                               (BlackboardAttribute(attID_jl_dt, JumpListADDbIngestModuleFactory.moduleName, Drive_Type)), \
                               (BlackboardAttribute(attID_jl_dsn, JumpListADDbIngestModuleFactory.moduleName, Drive_Serial_Number)), \
                               (BlackboardAttribute(attID_jl_des, JumpListADDbIngestModuleFactory.moduleName, Description)), \
                               (BlackboardAttribute(attID_jl_evl, JumpListADDbIngestModuleFactory.moduleName, Environment_Variables_Location)), \
                               (BlackboardAttribute(attID_jl_fat, JumpListADDbIngestModuleFactory.moduleName, File_Access_Time)), \
                               (BlackboardAttribute(attID_jl_faf, JumpListADDbIngestModuleFactory.moduleName, File_Attribute_Flags)), \
                               (BlackboardAttribute(attID_jl_fct, JumpListADDbIngestModuleFactory.moduleName, File_Creation_Time)), \
                               (BlackboardAttribute(attID_jl_fmt, JumpListADDbIngestModuleFactory.moduleName, File_Modification_Time)), \
                               (BlackboardAttribute(attID_jl_fs, JumpListADDbIngestModuleFactory.moduleName, File_Size)), \
                               (BlackboardAttribute(attID_jl_ic, JumpListADDbIngestModuleFactory.moduleName, Icon_Location)), \
                               (BlackboardAttribute(attID_jl_ltid, JumpListADDbIngestModuleFactory.moduleName, Link_Target_Identifier_Data)), \
                               (BlackboardAttribute(attID_jl_lp, JumpListADDbIngestModuleFactory.moduleName, Local_Path)), \
                               (BlackboardAttribute(attID_jl_mi, JumpListADDbIngestModuleFactory.moduleName, Machine_Identifier)), \
                               (BlackboardAttribute(attID_jl_np, JumpListADDbIngestModuleFactory.moduleName, Network_Path)), \
                               (BlackboardAttribute(attID_jl_rp, JumpListADDbIngestModuleFactory.moduleName, Relative_Path)), \
                               (BlackboardAttribute(attID_jl_vl, JumpListADDbIngestModuleFactory.moduleName, Volume_Label)), \
                               (BlackboardAttribute(attID_jl_wd, JumpListADDbIngestModuleFactory.moduleName, Working_Directory))))

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None))

    # Clean up: commit the transaction, close the database, and remove
    # the working copies.
    skCase_Tran.commit()
    stmt.close()
    dbConn.close()
    try:
        os.remove(lclDbPath)
    except:
        self.log(Level.INFO, "Failed to remove the file " + lclDbPath)
    #skCase.close()
    # Clean up the JL_AD directory and files.
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of JL_AD file failed " + os.path.join(temp_dir, file.getName()))
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of JL_AD directory failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "JumpList AD", " JumpList AD Has Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts.
    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(JumpListADDbIngestModuleFactory.moduleName, artID_jl_ad_evt, None))

    return IngestModule.ProcessResult.OK
    def process(self, dataSource, progressBar):
        """Extract each configured plist file, convert it to a SQLite DB with an
        external parser EXE, then mirror every table of that DB into Autopsy
        blackboard artifacts (one artifact type per plist, one attribute per column).

        dataSource/progressBar are supplied by the Autopsy ingest framework.
        Returns IngestModule.ProcessResult.OK; per-file parse failures are
        collected into the final ingest inbox message instead of aborting.
        """
        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        skCase = Case.getCurrentCase().getSleuthkitCase();
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        message_desc = ''
        # self.List_Of_DBs holds the plist file-name patterns to search for
        # (populated elsewhere in this module).
        for Plist_Files in self.List_Of_DBs:
            files = fileManager.findFiles(dataSource, Plist_Files)
            numFiles = len(files)
            self.log(Level.INFO, "found " + str(numFiles) + " files")
            progressBar.switchToDeterminate(numFiles)
            fileCount = 0;
            for file in files:
                # Extract the plist out of the image into the case temp directory;
                # the file id suffix keeps names unique across duplicates.
                #lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), SQLite_DB)
                lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()))
                ContentUtils.writeToFile(file, File(lclDbPath))

                # Run the EXE, saving output to a sqlite database
                self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "\\" + \
                         file.getName() + "-" + str(file.getId()) + " " + Temp_Dir + "\\Plist_File-" + str(file.getId()) + ".db3 ")
                pipe = Popen([self.path_to_exe, os.path.join(Temp_Dir, (file.getName() + "-" + str(file.getId()))), \
                              os.path.join(Temp_Dir, ("Plist_File-" + str(file.getId()) + ".db3"))], stdout=PIPE, stderr=PIPE)
                out_text = pipe.communicate()[0]
                self.log(Level.INFO, "Output from run is ==> " + out_text)

                # The parser EXE reports bad plists on stdout; record and move on.
                if 'not a valid Plist' in out_text:
                    message_desc = message_desc + "Error Parsing plist file " + file.getName() + ". File not parsed \n"
                else:
                    extDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "Plist_File-" + str(file.getId()) + ".db3")
                    #self.log(Level.INFO, "Path the sqlite database file created ==> " + lclDbPath)

                    # Open the generated SQLite DB via the JDBC sqlite driver.
                    try:
                        Class.forName("org.sqlite.JDBC").newInstance()
                        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % extDbPath)
                        self.log(Level.INFO, "Database ==> " + file.getName())
                    except SQLException as e:
                        # NOTE(review): execution falls through with dbConn unbound
                        # if this fires — the next try block would then NameError.
                        self.log(Level.INFO, "Could not open database file (not SQLite) " + extDbPath + " (" + e.getMessage() + ")")
                        #return IngestModule.ProcessResult.OK

                    # Walk SQLITE_MASTER and mirror every table/view into artifacts.
                    try:
                        stmt = dbConn.createStatement()
                        stmt2 = dbConn.createStatement()
                        stmt3 = dbConn.createStatement()
                        stmt4 = dbConn.createStatement()
                        resultSet = stmt.executeQuery("Select tbl_name, type from SQLITE_MASTER where type in ('table','view');")
                        #self.log(Level.INFO, "query SQLite Master table")
                        #self.log(Level.INFO, "query " + str(resultSet))

                        # Cycle through each row and create artifacts
                        while resultSet.next():
                            try:
                                self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                                table_name = resultSet.getString("tbl_name")
                                resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet.getString("tbl_name") + " ")
                                # while resultSet4.next():
                                row_count = resultSet4.getInt("NumRows")
                                self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ")
                                if row_count >= 1:
                                    #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                                    SQL_String_1 = "Select * from " + table_name + ";"
                                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                                    #self.log(Level.INFO, SQL_String_1)
                                    #self.log(Level.INFO, SQL_String_2)

                                    # One custom artifact type per plist file name.
                                    artifact_name = "TSK_" + file.getName()
                                    artifact_desc = "Plist " + file.getName()
                                    #self.log(Level.INFO, "Artifact Name ==> " + artifact_name + " Artifact Desc ==> " + artifact_desc)
                                    try:
                                        #self.log(Level.INFO, "Begin Create New Artifacts")
                                        artID_plist = skCase.addArtifactType( artifact_name, artifact_desc)
                                    except:
                                        # Type already exists from a prior run; re-fetch below.
                                        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                                    artID_plist = skCase.getArtifactTypeID(artifact_name)
                                    artID_plist_evt = skCase.getArtifactType(artifact_name)

                                    # Discover the table's columns and register one custom
                                    # attribute type per column, typed from the declared
                                    # SQLite column type (TEXT-ish -> STRING, else LONG).
                                    Column_Names = []
                                    Column_Types = []
                                    resultSet2 = stmt2.executeQuery(SQL_String_2)
                                    while resultSet2.next():
                                        Column_Names.append(resultSet2.getString("name").upper())
                                        Column_Types.append(resultSet2.getString("type").upper())
                                        attribute_name = "TSK_PLIST_" + resultSet2.getString("name").upper()
                                        #self.log(Level.INFO, "attribure id for " + attribute_name + " == " + resultSet2.getString("type").upper())
                                        if resultSet2.getString("type").upper() == "TEXT":
                                            try:
                                                attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                            except:
                                                self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                        elif resultSet2.getString("type").upper() == "LONGVARCHAR":
                                            try:
                                                attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                            except:
                                                self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                        elif resultSet2.getString("type").upper() == "":
                                            # Untyped columns are treated as strings.
                                            try:
                                                attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                            except:
                                                self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                        elif resultSet2.getString("type").upper() == "BLOB":
                                            try:
                                                attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                            except:
                                                self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                        elif resultSet2.getString("type").upper() == "REAL":
                                            try:
                                                attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                            except:
                                                self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")
                                        else:
                                            try:
                                                attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                            except:
                                                self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ")

                                    # Now read every row of the table and emit one artifact
                                    # per row, one attribute per column.
                                    resultSet3 = stmt3.executeQuery(SQL_String_1)
                                    while resultSet3.next():
                                        art = file.newArtifact(artID_plist)
                                        Column_Number = 1
                                        for col_name in Column_Names:
                                            #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                                            #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                                            #self.log(Level.INFO, "Result get information for column type " + Column_Types[Column_Number - 1] + " <== ")
                                            c_name = "TSK_PLIST_" + Column_Names[Column_Number - 1]
                                            #self.log(Level.INFO, "Attribute Name is " + c_name + " ")
                                            attID_ex1 = skCase.getAttributeType(c_name)
                                            if Column_Types[Column_Number - 1] == "TEXT":
                                                art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                            elif Column_Types[Column_Number - 1] == "":
                                                art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                            elif Column_Types[Column_Number - 1] == "LONGVARCHAR":
                                                art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                            elif Column_Types[Column_Number - 1] == "BLOB":
                                                art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file"))
                                            elif Column_Types[Column_Number - 1] == "REAL":
                                                art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number))))
                                            else:
                                                art.addAttribute(BlackboardAttribute(attID_ex1, ParsePlists2DBDelRecIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                                            Column_Number = Column_Number + 1

                                    # Tell the UI new artifacts of this type exist.
                                    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParsePlists2DBDelRecIngestModuleFactory.moduleName, \
                                                                                                     artID_plist_evt, None))
                            except SQLException as e:
                                self.log(Level.INFO, "Error getting values from table " + resultSet.getString("tbl_name") + " (" + e.getMessage() + ")")
                    except SQLException as e:
                        self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")")
                        #return IngestModule.ProcessResult.OK

                    # Clean up: close JDBC handles and remove both the extracted
                    # plist and the generated DB from the temp directory.
                    stmt.close()
                    dbConn.close()
                    os.remove(os.path.join(Temp_Dir, "Plist_File-" + str(file.getId()) + ".db3"))
                    os.remove(os.path.join(Temp_Dir, file.getName() + "-" + str(file.getId())))

        # After all databases, post a message to the ingest messages in box.
        if len(message_desc) == 0:
            message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Plist Parser", " Plist files have been parsed " )
            IngestServices.getInstance().postMessage(message)
        else:
            message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Plist Parser", message_desc + " Plist files have been parsed with the above files failing " )
            IngestServices.getInstance().postMessage(message)

        return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar): # we don't know how much work there is yet progressBar.switchToIndeterminate() # Set the database to be read to the once created by the prefetch parser program skCase = Case.getCurrentCase().getSleuthkitCase(); fileManager = Case.getCurrentCase().getServices().getFileManager() thumb_files = fileManager.findFiles(dataSource, "thumbcache_%.db", "") numFiles = len(thumb_files) self.log(Level.INFO, "Number of Thumbs.db files found ==> " + str(numFiles)) # Create Event Log directory in temp directory, if it exists then continue on processing Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath() tmp_dir = Case.getCurrentCase().getTempDirectory() self.log(Level.INFO, "create Directory " + Temp_Dir) try: os.mkdir(Temp_Dir + "\Thumbcache") except: self.log(Level.INFO, "Thumbcache directory already exists " + Temp_Dir) for thumb_file in thumb_files: if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK self.log(Level.INFO, "Processing file: " + thumb_file.getName()) #fileCount += 1 out_dir = os.path.join(Temp_Dir + "\Thumbcache", str(thumb_file.getId()) + "-" + thumb_file.getName()) try: os.mkdir(Temp_Dir + "\Thumbcache\\" + str(thumb_file.getId()) + "-" + thumb_file.getName()) except: self.log(Level.INFO, str(thumb_file.getId()) + "-" + thumb_file.getName() + " Directory already exists " + Temp_Dir) # Save the thumbs.DB locally in the ModuleOutput folder. 
use file id as name to reduce collisions lclDbPath = os.path.join(tmp_dir, str(thumb_file.getId()) + "-" + thumb_file.getName()) ContentUtils.writeToFile(thumb_file, File(lclDbPath)) # Run thumbs_viewer against the selected Database self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_thumbs + " -O " + out_dir + " " + lclDbPath) pipe = Popen([self.path_to_exe_thumbs, "-O", out_dir, lclDbPath], stdout=PIPE, stderr=PIPE) out_text = pipe.communicate()[0] self.log(Level.INFO, "Output from run is ==> " + out_text) # Get the parent abstract file Information abstract_file_info = skCase.getAbstractFileById(thumb_file.getId()) #self.log(Level.INFO, "Abstract File Info ==> " + str(abstract_file_info)) files = next(os.walk(out_dir))[2] for file in files: self.log(Level.INFO, " File Name is ==> " + file) dev_file = os.path.join(out_dir, file) local_file = os.path.join("ModuleOutput\\thumbcache\\" + str(thumb_file.getId()) + "-" + thumb_file.getName(), file) self.log(Level.INFO, " Dev File Name is ==> " + dev_file) self.log(Level.INFO, " Local File Name is ==> " + local_file) if not(self.check_dervived_existance(dataSource, file, abstract_file_info)): # Add dervived file # Parameters Are: # File Name, Local Path, size, ctime, crtime, atime, mtime, isFile, Parent File, rederive Details, Tool Name, # Tool Version, Other Details, Encoding Type dervived_file = skCase.addDerivedFile(file, local_file, os.path.getsize(dev_file), + \ 0, 0, 0, 0, True, abstract_file_info, "", "thumbcache_viewer_cmd.exe", "1.0.3.4", "", TskData.EncodingType.NONE) #self.log(Level.INFO, "Derived File ==> " + str(dervived_file)) else: pass try: os.remove(lclDbPath) except: self.log(Level.INFO, "removal of thumbcache file " + lclDbPath + " failed " ) # After all databases, post a message to the ingest messages in box. 
message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Thumbcache", " Thumbcache Files Have Been Analyzed " ) IngestServices.getInstance().postMessage(message) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar): self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history") # we don't know how much work there is yet progressBar.switchToIndeterminate() # Get the temp directory and create the sub directory Temp_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath() temp_dir = os.path.join(Temp_Dir, "Volatility") try: os.mkdir(temp_dir) except: self.log(Level.INFO, "Plaso Import Directory already exists " + Temp_Dir) # Set the database to be read to the once created by the prefetch parser program skCase = Case.getCurrentCase().getSleuthkitCase(); fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%", "/") numFiles = len(files) self.log(Level.INFO, "Number of files to process ==> " + str(numFiles)) #file_name = os.path.basename(self.path_to_storage_file) #self.log(Level.INFO, "File Name ==> " + file_name) #base_file_name = os.path.splitext(file_name)[0] #self.database_file = Temp_Dir + "\\volatility\\Plaso.db3" for file in files: self.log(Level.INFO, "File name to process is ==> " + str(file)) self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath())) image_file = file.getLocalAbsPath() if image_file != None: self.log(Level.INFO, "File name to process is ==> " + str(file.getLocalAbsPath())) file_name = os.path.basename(file.getLocalAbsPath()) self.log(Level.INFO, "File Name ==> " + file_name) base_file_name = os.path.splitext(file_name)[0] self.database_file = os.path.join(temp_dir, base_file_name + ".db3") self.log(Level.INFO, "File Name ==> " + self.database_file) if self.isAutodetect: self.find_profile(image_file) if self.Profile == None: continue for plugin_to_run in self.Plugins: if self.Python_Program: self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + file.getLocalAbsPath() + " " + \ "--profile=" + self.Profile + " --output=sqlite --output-file=" + self.database_file + " " + 
self.Additional_Parms + " " + plugin_to_run) if PlatformUtil.isWindowsOS(): pipe = Popen(["Python.exe", self.Volatility_Executable, "-f", file.getLocalAbsPath(), "--profile=" + self.Profile, "--output=sqlite", \ "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run], stdout=PIPE, stderr=PIPE) else: pipe = Popen(["python", self.Volatility_Executable, "-f", file.getLocalAbsPath(), "--profile=" + self.Profile, "--output=sqlite", \ "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run], stdout=PIPE, stderr=PIPE) else: self.log(Level.INFO, "Running program ==> " + self.Volatility_Executable + " -f " + file.getLocalAbsPath() + " " + \ "--profile=" + self.Profile + " --output=sqlite --output-file=" + self.database_file + " " + self.Additional_Parms + " " + plugin_to_run) pipe = Popen([self.Volatility_Executable, "-f", file.getLocalAbsPath(), "--profile=" + self.Profile, "--output=sqlite", \ "--output-file=" + self.database_file, self.Additional_Parms, plugin_to_run], stdout=PIPE, stderr=PIPE) out_text = pipe.communicate()[0] self.log(Level.INFO, "Output from run is ==> " + out_text) # Open the DB using JDBC self.log(Level.INFO, "Path the volatility database file created ==> " + self.database_file) try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % self.database_file) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + self.database_file + " (" + e.getMessage() + ")") try: exestmt = dbConn.createStatement() resultx = exestmt.execute('create table plugins_loaded_to_Autopsy (table_name text);') except SQLException as e: self.log(Level.INFO, "Could not create table plugins_loaded_to_autopsy") # Query the database try: stmt = dbConn.createStatement() stmt2 = dbConn.createStatement() stmt3 = dbConn.createStatement() stmt4 = dbConn.createStatement() resultSet1 = stmt.executeQuery("Select upper(tbl_name) tbl_name from SQLITE_MASTER where 
upper(tbl_name) " \ " not in (select table_name from plugins_loaded_to_Autopsy)" \ " and upper(tbl_name) <> 'PLUGINS_LOADED_TO_AUTOPSY';") # Cycle through each row and create artifacts while resultSet1.next(): try: self.log(Level.INFO, "Begin Create New Artifacts ==> " + resultSet1.getString("tbl_name")) artID_art = skCase.addArtifactType( "TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name, "Volatility " + \ resultSet1.getString("tbl_name") + " " + file_name) except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") # Get the artifact and attributes artID_art = skCase.getArtifactTypeID("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name) artID_art_evt = skCase.getArtifactType("TSK_VOL_" + resultSet1.getString("tbl_name") + "_" + file_name) try: self.log(Level.INFO, "Result (" + resultSet1.getString("tbl_name") + ")") table_name = resultSet1.getString("tbl_name") resultSet4 = stmt4.executeQuery("Select count(*) 'NumRows' from " + resultSet1.getString("tbl_name") + " ") row_count = resultSet4.getInt("NumRows") self.log(Level.INFO, " Number of Rows is " + str(row_count) + " ") if row_count >= 1: SQL_String_1 = "Select * from " + table_name + ";" SQL_String_2 = "PRAGMA table_info('" + table_name + "')" self.log(Level.INFO, SQL_String_1) self.log(Level.INFO, SQL_String_2) artifact_name = "TSK_VOL_" + table_name.upper() + "_" + file_name artID_sql = skCase.getArtifactTypeID(artifact_name) artID_sql_evt = skCase.getArtifactType(artifact_name) Column_Names = [] Column_Types = [] resultSet2 = stmt2.executeQuery(SQL_String_2) while resultSet2.next(): Column_Names.append(resultSet2.getString("name").upper()) Column_Types.append(resultSet2.getString("type").upper()) attribute_name = "TSK_VOL_" + table_name + "_" + resultSet2.getString("name").upper() if resultSet2.getString("type").upper() == "TEXT": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, 
BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "LONGVARCHAR": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "BLOB": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") elif resultSet2.getString("type").upper() == "REAL": try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") else: try: attID_ex1 = skCase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name")) except: self.log(Level.INFO, "Attributes Creation Error, " + attribute_name + " ==> ") resultSet3 = stmt3.executeQuery(SQL_String_1) while resultSet3.next(): art = file.newArtifact(artID_sql) Column_Number = 1 for col_name in Column_Names: c_name = "TSK_VOL_" + table_name.upper() + "_" + Column_Names[Column_Number - 1] attID_ex1 = skCase.getAttributeType(c_name) if Column_Types[Column_Number - 1] == "TEXT": if resultSet3.getString(Column_Number) == None: 
art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, " ")) else: art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, resultSet3.getString(Column_Number))) elif Column_Types[Column_Number - 1] == "": art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, resultSet3.getString(Column_Number))) elif Column_Types[Column_Number - 1] == "LONGVARCHAR": art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file")) elif Column_Types[Column_Number - 1] == "BLOB": art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, "BLOBS Not Supported - Look at actual file")) elif Column_Types[Column_Number - 1] == "REAL": art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, long(resultSet3.getFloat(Column_Number)))) else: art.addAttribute(BlackboardAttribute(attID_ex1, VolatilityIngestModuleFactory.moduleName, long(resultSet3.getString(Column_Number)))) Column_Number = Column_Number + 1 IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(VolatilityIngestModuleFactory.moduleName, \ artID_sql_evt, None)) except SQLException as e: self.log(Level.INFO, "Error getting values from table " + resultSet.getString("tbl_name") + " (" + e.getMessage() + ")") try: # exestmt = createStatement() resultx = exestmt.execute("insert into plugins_loaded_to_Autopsy values ('" + table_name + "');") except SQLException as e: self.log(Level.INFO, "Could not create table plugins_loaded_to_autopsy") except SQLException as e: self.log(Level.INFO, "Error querying database " + file.getName() + " (" + e.getMessage() + ")") # After all databases, post a message to the ingest messages in box. 
message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "VolatilitySettings", " VolatilitySettings Has Been Analyzed " ) IngestServices.getInstance().postMessage(message) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar): # Check to see if the artifacts exist and if not then create it, also check to see if the attributes # exist and if not then create them skCase = Case.getCurrentCase().getSleuthkitCase(); try: self.log(Level.INFO, "Begin Create New Artifacts") artID_pf = skCase.addArtifactType( "TSK_PREFETCH", "Windows Prefetch") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH") # Create the attribute type, if it exists then catch the error try: attID_pf_fn = skCase.addArtifactAttributeType("TSK_PREFETCH_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Prefetch File Name") except: self.log(Level.INFO, "Attributes Creation Error, Prefetch File Name. ==> ") try: attID_pf_an = skCase.addArtifactAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Actual File Name") except: self.log(Level.INFO, "Attributes Creation Error, Actual File Name. ==> ") try: attID_nr = skCase.addArtifactAttributeType("TSK_PF_RUN_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Program Number Runs") except: self.log(Level.INFO, "Attributes Creation Error, Program Number Runs. ==> ") try: attID_ex1 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_1", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 1") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 1. ==> ") try: attID_ex2 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_2", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 2") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 2. 
==> ") try: attID_ex3 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_3", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 3") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 3. ==> ") try: attID_ex4 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_4", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 4") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 4 ==> ") try: attID_ex5 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_5", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 5") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 5. ==> ") try: attID_ex6 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_6", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 6") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 6. ==> ") try: attID_ex7 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_7", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 7") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 7. 
==> ") try: attID_ex8 = skCase.addArtifactAttributeType("TSK_PF_EXEC_DTTM_8", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "PF Execution DTTM 8") except: self.log(Level.INFO, "Attributes Creation Error, PF Execution DTTM 8 ==> ") self.log(Level.INFO, "Get Artifacts after they were created.") # Get the new artifacts and attributes that were just created artID_pf = skCase.getArtifactTypeID("TSK_PREFETCH") artID_pf_evt = skCase.getArtifactType("TSK_PREFETCH") attID_pf_fn = skCase.getAttributeType("TSK_PREFETCH_FILE_NAME") attID_pf_an = skCase.getAttributeType("TSK_PREFETCH_ACTUAL_FILE_NAME") attID_nr = skCase.getAttributeType("TSK_PF_RUN_COUNT") attID_ex1 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_1") attID_ex2 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_2") attID_ex3 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_3") attID_ex4 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_4") attID_ex5 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_5") attID_ex6 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_6") attID_ex7 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_7") attID_ex8 = skCase.getAttributeType("TSK_PF_EXEC_DTTM_8") # we don't know how much work there is yet progressBar.switchToIndeterminate() # Find the prefetch files and the layout.ini file from the /windows/prefetch folder fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%.pf") numFiles = len(files) self.log(Level.INFO, "found " + str(numFiles) + " files") progressBar.switchToDeterminate(numFiles) fileCount = 0; # Create prefetch directory in temp directory, if it exists then continue on processing Temp_Dir = os.path.join(Case.getCurrentCase().getTempDirectory(), "Prefetch_Files") self.log(Level.INFO, "create Directory " + Temp_Dir) try: os.mkdir(Temp_Dir) except: self.log(Level.INFO, "Prefetch Directory already exists " + Temp_Dir) # Write out each prefetch file to the temp directory for file in files: # Check if the user pressed cancel while we 
were busy if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK #self.log(Level.INFO, "Processing file: " + file.getName()) fileCount += 1 # Save the DB locally in the temp folder. use file id as name to reduce collisions lclDbPath = os.path.join(Temp_Dir, file.getName()) ContentUtils.writeToFile(file, File(lclDbPath)) # Run the EXE, saving output to a sqlite database self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + " Parm 2 ==> " + Case.getCurrentCase().getTempDirectory()) subprocess.Popen([self.path_to_exe, Temp_Dir, os.path.join(Temp_Dir, "Autopsy_PF_DB.db3")]).communicate()[0] # Set the database to be read to the once created by the prefetch parser program lclDbPath = os.path.join(Temp_Dir, "Autopsy_PF_DB.db3") self.log(Level.INFO, "Path the prefetch database file created ==> " + lclDbPath) # Open the DB using JDBC try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Query the contacts table in the database and get all columns. 
try: stmt = dbConn.createStatement() resultSet = stmt.executeQuery("Select prefetch_File_Name, actual_File_Name, Number_time_file_run, " + " Embeded_date_Time_Unix_1, " + " Embeded_date_Time_Unix_2, " + " Embeded_date_Time_Unix_3, " + " Embeded_date_Time_Unix_4, " + " Embeded_date_Time_Unix_5, " + " Embeded_date_Time_Unix_6, " + " Embeded_date_Time_Unix_7, " + " Embeded_date_Time_Unix_8 " + " from prefetch_file_info ") except SQLException as e: self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Cycle through each row and create artifacts while resultSet.next(): try: self.log(Level.INFO, "Result (" + resultSet.getString("Prefetch_File_Name") + ")") Prefetch_File_Name = resultSet.getString("Prefetch_File_Name") Actual_File_Name = resultSet.getString("Actual_File_Name") Number_Of_Runs = resultSet.getString("Number_Time_File_Run") Time_1 = resultSet.getInt("Embeded_date_Time_Unix_1") Time_2 = resultSet.getInt("Embeded_date_Time_Unix_2") Time_3 = resultSet.getInt("Embeded_date_Time_Unix_3") Time_4 = resultSet.getInt("Embeded_date_Time_Unix_4") Time_5 = resultSet.getInt("Embeded_date_Time_Unix_5") Time_6 = resultSet.getInt("Embeded_date_Time_Unix_6") Time_7 = resultSet.getInt("Embeded_date_Time_Unix_7") Time_8 = resultSet.getInt("Embeded_date_Time_Unix_8") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, Prefetch_File_Name) for file in files: # Make artifact for TSK_PREFETCH, this can happen when custom attributes are fully supported #art = file.newArtifact(artID_pf) art = file.newArtifact(artID_pf) #self.log(Level.INFO, "Attribute Number ==>" + str(attID_pf_fn) + " " + str(attID_pf_an) ) # Add the attributes to the artifact. 
art.addAttributes(((BlackboardAttribute(attID_pf_fn, ParsePrefetchDbIngestModuleFactory.moduleName, Prefetch_File_Name)), \ (BlackboardAttribute(attID_pf_an, ParsePrefetchDbIngestModuleFactory.moduleName, Actual_File_Name)), \ (BlackboardAttribute(attID_nr, ParsePrefetchDbIngestModuleFactory.moduleName, Number_Of_Runs)), \ (BlackboardAttribute(attID_ex1, ParsePrefetchDbIngestModuleFactory.moduleName, Time_1)), \ (BlackboardAttribute(attID_ex2, ParsePrefetchDbIngestModuleFactory.moduleName, Time_2)), \ (BlackboardAttribute(attID_ex3, ParsePrefetchDbIngestModuleFactory.moduleName, Time_3)), \ (BlackboardAttribute(attID_ex4, ParsePrefetchDbIngestModuleFactory.moduleName, Time_4)), \ (BlackboardAttribute(attID_ex5, ParsePrefetchDbIngestModuleFactory.moduleName, Time_5)), \ (BlackboardAttribute(attID_ex6, ParsePrefetchDbIngestModuleFactory.moduleName, Time_6)), \ (BlackboardAttribute(attID_ex7, ParsePrefetchDbIngestModuleFactory.moduleName, Time_7)), \ (BlackboardAttribute(attID_ex8, ParsePrefetchDbIngestModuleFactory.moduleName, Time_8)))) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName, artID_pf_evt, None)) # Clean up stmt.close() dbConn.close() os.remove(lclDbPath) #Clean up prefetch directory and files for file in files: try: os.remove(os.path.join(Temp_Dir, file.getName())) except: self.log(Level.INFO, "removal of prefetch file failed " + Temp_Dir + "\\" + file.getName()) try: os.rmdir(Temp_Dir) except: self.log(Level.INFO, "removal of prefetch directory failed " + Temp_Dir) # After all databases, post a message to the ingest messages in box. 
message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Prefetch Analyzer", " Prefetch Has Been Analyzed " ) IngestServices.getInstance().postMessage(message) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParsePrefetchDbIngestModuleFactory.moduleName, artID_pf_evt, None)) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Parse the configured XML (self.data_from_xml) and, per XML element,
    locate matching files in the data source, extract geodata from them
    (pics / sqlite DBs / json / plain files / app-specific handlers), and
    post TSK_GPS_TRACKPOINT (or custom geodataTEXT) artifacts.

    Returns IngestModule.ProcessResult.OK (also on user cancel).
    """
    ####################
    # Proprietary code #
    ####################

    # Local helpers -------------------------------------------------------
    def getBlackboardAtt(label, value):
        # Build a BlackboardAttribute from a standard attribute-type label.
        return BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.fromLabel(label).getTypeID(),
            AndroidGeodataXMLFactory.moduleName, value)

    def cleanString(s):
        # Strip newlines/tabs and leading/trailing spaces from XML text.
        s = s.replace("\n", "").replace("\t", "")
        while s.startswith(" "):
            s = s[1:]
        while s.endswith(" "):
            s = s[:len(s) - 1]
        return s

    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # FileManager for the current case
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Object representing the XML configuration; one child element per rule.
    tree = et.fromstring(self.data_from_xml)

    numEl = len(tree)
    progressBar.switchToDeterminate(numEl)
    fileCount = 0
    elCount = 0

    # TODO: remove blank spaces
    for el in tree:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        # Counts element
        elCount += 1

        # Inits variables
        data = []
        files = []

        path = el.find("path")
        if path is not None:
            path = cleanString(path.text)

        name = el.find("name")
        if name is not None:
            name = cleanString(name.text)

        # If the current element is pic: take every file under the path.
        if el.tag == "pic" and path:
            files = fileManager.findFiles(dataSource, "%", path)

        # If the current element is db: all files, or just the named one.
        if el.tag == "db" and path:
            files = fileManager.findFiles(dataSource, "%", path) if name is None \
                else fileManager.findFiles(dataSource, name, path)

        # If the current element is either file or json (name is required).
        if el.tag in ("file", "json") and path:
            files = fileManager.findFiles(
                dataSource, name, path) if name is not None else []

        # If the current element is app: dispatch to a function in appfun
        # named after the element's <name>.
        if el.tag == "app" and name and path:
            try:
                f = getattr(appfun, name)
            except:
                self.log(Level.INFO, "Error to load the function " + name)
            else:
                filename = el.find("filename")
                if filename is not None:
                    filename = cleanString(filename.text)
                    files = fileManager.findFiles(dataSource, filename, path)
                    if files:
                        data = f(files)
            finally:
                # App handlers fill `data` directly; the generic per-file
                # loop below must not re-process their files.
                files = []

        # Files contain all the files found and file would be each one of them per cycle
        for file in files:
            fileCount += 1

            # Handles the file
            handler = FileHandler(file, file.getNameExtension(),
                                  file.getName(), file.getUniquePath(),
                                  file.getId())

            # Extract the file into the case temp dir before inspection.
            if handler.store_file(
                    Case.getCurrentCase().getTempDirectory()):

                if el.tag == "pic" and handler.isPic():
                    res = handler.processPic()
                    if res:
                        res["name"] = handler.getName()
                        res["type"] = "pic"
                        res["description"] = "from pic"
                        data.append({"file": file, "el": [res]})

                if el.tag == "db":
                    if handler.connect():
                        # A <tables> child pins the tables/columns to scan;
                        # otherwise every table is probed generically.
                        tables_tag = el.find("tables")
                        tables = handler.getTables() if not tables_tag else tables_tag
                        fileobj = {"file": file, "el": []}
                        for table in tables:
                            if not tables_tag:
                                resultSet = handler.query(table)
                                tablename = table
                                try:
                                    resultSetMetaData = resultSet.getMetaData()
                                    numColumns = resultSetMetaData.getColumnCount()
                                except:
                                    resultSetMetaData = None
                                    numColumns = None
                            else:
                                tablename = table.attrib["name"]
                                resultSet = handler.query(table.attrib["name"])
                                resultSetMetaData = None
                                numColumns = None

                            if (tables_tag and resultSet) or (
                                    not tables_tag and
                                    (resultSet and resultSetMetaData and numColumns)):
                                rows = []
                                while resultSet.next():
                                    attributes = {}
                                    # Generic mode walks column indexes;
                                    # configured mode walks <column> elements.
                                    columns = range(1, numColumns + 1) \
                                        if not tables_tag else table.findall("column")
                                    for column in columns:
                                        if not tables_tag:
                                            try:
                                                nameColumn = resultSetMetaData.getColumnName(column)
                                            except:
                                                nameColumn = None
                                        else:
                                            if column.get("type") == "linked_datetime":
                                                # Datetime lives in another
                                                # table; pull it separately.
                                                res = handler.query(
                                                    cleanString(column.get("table")))
                                                while res.next():
                                                    try:
                                                        value = res.getString(
                                                            cleanString(column.text))
                                                    except:
                                                        pass
                                                    else:
                                                        attributes["datetime"] = long(value)
                                                        attributes["column_datetime"] = cleanString(column.text)
                                                nameColumn = None
                                            else:
                                                nameColumn = column = cleanString(column.text)
                                        if nameColumn:
                                            temp = handler.processDB(
                                                resultSet, column,
                                                nameColumn, self.dict, False)
                                            if temp:
                                                if temp[0] == "single":
                                                    attributes[temp[1]] = temp[2]
                                                    attributes["name"] = handler.getName()
                                                    attributes["type"] = "db"
                                                    attributes["table"] = tablename
                                                    attributes["path"] = handler.getPath()
                                                    if temp[1] in ("latitude",
                                                                   "longitude",
                                                                   "datetime",
                                                                   "text"):
                                                        attributes["column_" + temp[1]] = nameColumn
                                                if temp[0] == "multiple":
                                                    if temp[1]:
                                                        for x in temp[1]:
                                                            x["name"] = handler.getName()
                                                            x["table"] = tablename
                                                            x["type"] = "db"
                                                            x["path"] = handler.getPath()
                                                            x["column"] = nameColumn
                                                        rows = rows + temp[1]
                                    if attributes:
                                        rows.append(attributes)
                                if rows:
                                    fileobj["el"] = fileobj["el"] + rows
                        data.append(fileobj)
                        handler.close()

                if el.tag == "json":
                    res = handler.processJsonFile()
                    if res:
                        for x in res:
                            x["name"] = handler.getName()
                            x["path"] = handler.getPath()
                            x["type"] = "json"
                            x["description"] = "from file json"
                        data.append({"file": file, "el": res})

                if el.tag == "file":
                    res = {}
                    res["name"] = handler.getName()
                    res["path"] = handler.getPath()
                    res["type"] = "file"
                    res["text"] = "look at the file"
                    res["description"] = "from file"
                    data.append({"file": file, "el": [res]})

                if not handler.delete_file():
                    self.log(Level.INFO,
                             "Error in deleting the file " + handler.getlclPath())

        # Display the results: turn collected dicts into blackboard artifacts.
        if data:
            for f in data:
                file = f["file"]
                for item in f["el"]:
                    # NOTE(review): `"latitude" and "longitude" in item` only
                    # tests "longitude" membership — the "latitude" literal is
                    # always truthy. Likely intended:
                    # `"latitude" in item and "longitude" in item`.
                    if "latitude" and "longitude" in item:
                        art = file.newArtifact(
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT)
                        if "datetime" in item and item["datetime"] != "":
                            if isinstance(item["datetime"], str):
                                if el.tag == "pic":
                                    att1 = getBlackboardAtt(
                                        "TSK_DATETIME",
                                        timestamp.getTimestampFromPicDatetime(item["datetime"]))
                                else:
                                    att1 = getBlackboardAtt(
                                        "TSK_DATETIME",
                                        timestamp.getTimestampFromString(item["datetime"]))
                            else:
                                # 10 digits => seconds, 13 digits => millis.
                                if len(str(item["datetime"])) == 10:
                                    att1 = getBlackboardAtt(
                                        "TSK_DATETIME", item["datetime"])
                                elif len(str(item["datetime"])) == 13:
                                    att1 = getBlackboardAtt(
                                        "TSK_DATETIME",
                                        int(item["datetime"] / 1000))
                            art.addAttribute(att1)
                        att2 = getBlackboardAtt("TSK_GEO_LATITUDE",
                                                item["latitude"])
                        att3 = getBlackboardAtt("TSK_GEO_LONGITUDE",
                                                item["longitude"])
                        att4 = getBlackboardAtt("TSK_PROG_NAME", item["name"])
                        art.addAttributes([att2, att3, att4])
                        if "column" in item:
                            att5 = getBlackboardAtt(
                                "TSK_DESCRIPTION",
                                "table: " + item["table"] + ", column = " + item["column"])
                            art.addAttribute(att5)
                        elif "table" in item:
                            att5 = getBlackboardAtt(
                                "TSK_DESCRIPTION",
                                "table: " + item["table"] + ", column = " +
                                item["column_latitude"] + ", " + item["column_longitude"])
                            art.addAttribute(att5)
                        elif "description" in item:
                            att5 = getBlackboardAtt(
                                "TSK_DESCRIPTION", item["description"])
                            art.addAttribute(att5)
                        try:
                            # index the artifact for keyword search
                            blackboard.indexArtifact(art)
                        except Blackboard.BlackboardException:
                            self.log(Level.SEVERE,
                                     "Error indexing artifact " + art.getDisplayName())
                    elif "text" in item:
                        art_text = file.newArtifact(
                            blackboard.getOrAddArtifactType(
                                "geodataTEXT", "Geodata in text").getTypeID())
                        att = getBlackboardAtt("TSK_TEXT", item["text"])
                        art_text.addAttribute(att)
                        # NOTE(review): same truthiness issue as above —
                        # only `"table" in item` is actually tested.
                        if "column_text" and "table" in item:
                            att1 = getBlackboardAtt(
                                "TSK_DESCRIPTION",
                                "table: " + item["table"] + ", column = " + item["column_text"])
                            art_text.addAttribute(att1)
                        elif "description" in item:
                            att1 = getBlackboardAtt(
                                "TSK_DESCRIPTION", item["description"])
                            art_text.addAttribute(att1)
                        try:
                            # index the artifact for keyword search
                            blackboard.indexArtifact(art_text)
                        except Blackboard.BlackboardException:
                            self.log(Level.SEVERE,
                                     "Error indexing artifact " + art_text.getDisplayName())

        # Update the progress bar
        progressBar.progress(elCount)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(
            AndroidGeodataXMLFactory.moduleName,
            BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT, None))

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "AndroidGeodataXML",
        str(elCount) + " elements in the XML and " + str(fileCount) +
        " files processed")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Find every "contacts.db" in the data source, read its `contacts`
    table, and post one TSK_CONTACT artifact (name/email/phone) per row.

    The database is copied out of the image into the case temp directory
    and opened with the sqlite JDBC driver.  Returns
    IngestModule.ProcessResult.OK in all paths (errors are logged).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find files named contacts.db, regardless of parent path
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "contacts.db")

    numFiles = len(files)
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0
    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(),
                                 str(file.getId()) + ".db")
        ContentUtils.writeToFile(file, File(lclDbPath))

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO,
                "Could not open database file (not SQLite) " +
                file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("SELECT * FROM contacts")
        except SQLException as e:
            self.log(
                Level.INFO,
                "Error querying database for contacts table (" +
                e.getMessage() + ")")
            # FIX: close the connection before bailing out so the JDBC
            # handle (and the temp copy on Windows) is not leaked.
            dbConn.close()
            return IngestModule.ProcessResult.OK

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                name = resultSet.getString("name")
                email = resultSet.getString("email")
                phone = resultSet.getString("phone")
            except SQLException as e:
                self.log(
                    Level.INFO,
                    "Error getting values from contacts table (" +
                    e.getMessage() + ")")
                # FIX: skip this row; the original fell through and built
                # an artifact from undefined (first row) or stale values.
                continue

            # Make an artifact on the blackboard, TSK_CONTACT and give it
            # attributes for each of the fields
            art = file.newArtifact(
                BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)

            attributes = ArrayList()
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.
                    getTypeID(),
                    ContactsDbIngestModuleFactory.moduleName, name))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(),
                    ContactsDbIngestModuleFactory.moduleName, email))
            attributes.add(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.
                    getTypeID(),
                    ContactsDbIngestModuleFactory.moduleName, phone))
            art.addAttributes(attributes)

            try:
                # index the artifact for keyword search
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE,
                         "Error indexing artifact " + art.getDisplayName())

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ContactsDbIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT,
                            None))

        # Clean up
        stmt.close()
        dbConn.close()
        os.remove(lclDbPath)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "ContactsDb Analyzer",
                                          "Found %d files" % fileCount)
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract every SYSTEM registry hive (parent path "config") to a temp
    directory, run the external shimcache parser over each one to produce
    a sqlite DB, then mirror every table of that DB into TSK_SHIMCACHE
    artifacts (attribute types created dynamically from the table schema).

    Returns IngestModule.ProcessResult.OK, also on early error exits.
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the SAM parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "SYSTEM", "config")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Event Log directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "Shimcache")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Shimcache Directory already exists " + Temp_Dir)

    for file in files:

        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))
        self.log(Level.INFO, "Saved File ==> " + lclDbPath)

        # Run the EXE, saving output to a sqlite database
        #try:
        #    self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " -i " + Temp_Dir + "\\Shimcache\\" + \
        #             file.getName() + " -o " + Temp_Dir + "\\Shimcache_db.db3")
        #    pipe = Popen([self.path_to_exe, "-i " + Temp_Dir + "\\Shimcache\\" + file.getName(), "-o " + Temp_Dir + \
        #                  "\\Shimcache_db.db3"], stdout=PIPE, stderr=PIPE)
        # NOTE(review): this log message shows the DB path under Temp_Dir,
        # but the Popen below writes it under temp_dir (the Shimcache
        # subdirectory) — the log text is inaccurate, the Popen is what runs.
        self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "//Shimcache//" + \
                 file.getName() + " " + Temp_Dir + "//Shimcache_db.db3")
        pipe = Popen([self.path_to_exe,
                      os.path.join(temp_dir, file.getName()),
                      os.path.join(temp_dir, "Shimcache_db.db3")],
                     stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)
        #except:
        #    self.log(Level.INFO, "Error running program shimcache_parser.")

        # Open the DB using JDBC
        lclDbPath = os.path.join(temp_dir, "Shimcache_db.db3")
        self.log(Level.INFO, "Path the system database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            # NOTE(review): returning here abandons any remaining SYSTEM
            # hives and skips the temp-dir cleanup below — confirm intended.
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER; ")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for system table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Create the custom artifact type (idempotent: falls back to lookup
        # when it already exists), then re-fetch id/type for later use.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_shim = skCase.addArtifactType("TSK_SHIMCACHE", "Shimcache")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
        artID_shim = skCase.getArtifactTypeID("TSK_SHIMCACHE")
        artID_shim_evt = skCase.getArtifactType("TSK_SHIMCACHE")

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                table_name = resultSet.getString("tbl_name")
                #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                # NOTE(review): SQL is built by string concatenation from
                # table names read out of the parser's own output DB.
                SQL_String_1 = "Select * from " + table_name + ";"
                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                #self.log(Level.INFO, SQL_String_1)
                #self.log(Level.INFO, SQL_String_2)
                Column_Names = []
                Column_Types = []
                # NOTE(review): nested executeQuery() calls reuse `stmt`;
                # per the JDBC contract that closes the outer resultSet —
                # presumably tolerated by this sqlite driver, verify.
                resultSet2 = stmt.executeQuery(SQL_String_2)
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type"))
                    # One custom attribute type per column: TEXT columns map
                    # to STRING attributes, everything else to DATETIME.
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_SHIMCACHE_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_SHIMCACHE_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One TSK_SHIMCACHE artifact per data row, one attribute per
                # column, typed by the schema captured above.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_shim)
                    Column_Number = 1
                    for col_name in Column_Names:
                        #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                        #self.log(Level.INFO, "Result get information for column " + Column_Types[Column_Number - 1] + " ")
                        #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                        c_name = "TSK_SHIMCACHE_" + col_name
                        #self.log(Level.INFO, "Attribute Name is " + c_name + " Atribute Type is " + str(Column_Types[Column_Number - 1]))
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseShimcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        else:
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseShimcacheIngestModuleFactory.moduleName, resultSet3.getInt(Column_Number)))
                        Column_Number = Column_Number + 1
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from Shimcache table (" + e.getMessage() + ")")

        # Clean up
        stmt.close()
        dbConn.close()

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseShimcacheIngestModuleFactory.moduleName,
                            artID_shim_evt, None))

        #Clean up EventLog directory and files
        try:
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "removal of Shimcache tempdb failed " + lclDbPath)

    # Remove the extracted hives and the working directory after all files.
    for file in files:
        try:
            os.remove(os.path.join(temp_dir, file.getName()))
        except:
            self.log(Level.INFO, "removal of Shimcache file failed " + Temp_Dir + "\\" + file.getName())
    try:
        os.rmdir(temp_dir)
    except:
        self.log(Level.INFO, "removal of Shimcache directory failed " + Temp_Dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "Shimcache Parser",
                                          " Shimcache Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Extract all *.evtx files, run the external Export_EvtxToSqlite style
    parser to build EventLogs.db3, then create artifacts:
      - TSK_EVTX_LOGS (one per event row) when specific event ids were
        selected (self.List_Of_Events[0] != 'ALL'), or
      - TSK_EVTX_LOGS_BY_ID (event id + count) when 'ALL' was selected.

    Returns ProcessResult.ERROR when no event logs were selected,
    otherwise ProcessResult.OK (errors along the way are logged).
    """
    #Check to see if event logs were selected, if not then send message and error out else process events selected
    self.log(Level.INFO, "List Of Events ==> " + str(self.List_Of_Events) + " <== Number of Events ==> " + str(len(self.List_Of_Events)))
    if len(self.List_Of_Events) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " No Event Logs Selected to Parse " )
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR
    else:
        # Check to see if the artifacts exist and if not then create it, also check to see if the attributes
        # exist and if not then create them
        skCase = Case.getCurrentCase().getSleuthkitCase();
        # NOTE(review): a transaction is opened here but no commit/rollback
        # is visible in this method — confirm it is closed elsewhere.
        skCase_Tran = skCase.beginTransaction()
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_evtx = skCase.addArtifactType( "TSK_EVTX_LOGS", "Windows Event Logs")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_evtx_Long = skCase.addArtifactType( "TSK_EVTX_LOGS_BY_ID", "Windows Event Logs By Event Id")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID")

        # Create the custom attribute types; each add is best-effort and
        # falls back to the existing definition when it already exists.
        try:
            attID_ev_fn = skCase.addArtifactAttributeType("TSK_EVTX_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Log File Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ")
        try:
            attID_ev_rc = skCase.addArtifactAttributeType("TSK_EVTX_RECOVERED_RECORD", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Recovered Record")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Recovered Record. ==> ")
        try:
            attID_ev_cn = skCase.addArtifactAttributeType("TSK_EVTX_COMPUTER_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Computer Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ")
        try:
            attID_ev_ei = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Identiifier")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ")
        try:
            attID_ev_eiq = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Identifier Qualifiers")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Identifier Qualifiers. ==> ")
        try:
            attID_ev_el = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_LEVEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Level")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Level. ==> ")
        try:
            attID_ev_oif = skCase.addArtifactAttributeType("TSK_EVTX_OFFSET_IN_FILE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Offset In File")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Offset In File. ==> ")
        try:
            attID_ev_id = skCase.addArtifactAttributeType("TSK_EVTX_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Identifier")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ")
        try:
            attID_ev_sn = skCase.addArtifactAttributeType("TSK_EVTX_SOURCE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source Name")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Source Name. ==> ")
        try:
            attID_ev_usi = skCase.addArtifactAttributeType("TSK_EVTX_USER_SECURITY_ID", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User Security ID")
        except:
            self.log(Level.INFO, "Attributes Creation Error, User Security ID. ==> ")
        try:
            attID_ev_et = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Time. ==> ")
        try:
            attID_ev_ete = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME_EPOCH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time Epoch")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ")
        try:
            attID_ev_dt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Detail")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event Detail. ==> ")
        try:
            attID_ev_cnt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_ID_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Id Count")
        except:
            self.log(Level.INFO, "Attributes Creation Error, Event ID Count. ==> ")

        #self.log(Level.INFO, "Get Artifacts after they were created.")
        # Get the new artifacts and attributes that were just created
        artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS")
        artID_evtx_evt = skCase.getArtifactType("TSK_EVTX_LOGS")
        artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID")
        artID_evtx_Long_evt = skCase.getArtifactType("TSK_EVTX_LOGS_BY_ID")
        attID_ev_fn = skCase.getAttributeType("TSK_EVTX_FILE_NAME")
        attID_ev_rc = skCase.getAttributeType("TSK_EVTX_RECOVERED_RECORD")
        attID_ev_cn = skCase.getAttributeType("TSK_EVTX_COMPUTER_NAME")
        attID_ev_ei = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER")
        attID_ev_eiq = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS")
        attID_ev_el = skCase.getAttributeType("TSK_EVTX_EVENT_LEVEL")
        attID_ev_oif = skCase.getAttributeType("TSK_EVTX_OFFSET_IN_FILE")
        attID_ev_id = skCase.getAttributeType("TSK_EVTX_IDENTIFIER")
        attID_ev_sn = skCase.getAttributeType("TSK_EVTX_SOURCE_NAME")
        attID_ev_usi = skCase.getAttributeType("TSK_EVTX_USER_SECURITY_ID")
        attID_ev_et = skCase.getAttributeType("TSK_EVTX_EVENT_TIME")
        attID_ev_ete = skCase.getAttributeType("TSK_EVTX_EVENT_TIME_EPOCH")
        attID_ev_dt = skCase.getAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT")
        attID_ev_cnt = skCase.getAttributeType("TSK_EVTX_EVENT_ID_COUNT")

        # we don't know how much work there is yet
        progressBar.switchToIndeterminate()

        # Find the Windows Event Log Files
        files = []
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, "%.evtx")
        # if self.List_Of_Events[0] == 'ALL':
        #     files = fileManager.findFiles(dataSource, "%.evtx")
        # else:
        #     for eventlog in self.List_Of_Events:
        #         file_name = fileManager.findFiles(dataSource, eventlog)
        #         files.extend(file_name)
        #self.log(Level.INFO, "found " + str(file_name) + " files")
        #self.log(Level.INFO, "found " + str(files) + " files")
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files")
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;

        # Create Event Log directory in temp directory, if it exists then continue on processing
        Temp_Dir = Case.getCurrentCase().getTempDirectory()
        self.log(Level.INFO, "create Directory " + Temp_Dir)
        try:
            os.mkdir(Temp_Dir + "\EventLogs")
        except:
            self.log(Level.INFO, "Event Log Directory already exists " + Temp_Dir)

        # Write out each Event Log file to the temp directory
        for file in files:

            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Processing file: " + file.getName())
            fileCount += 1

            # Save the DB locally in the temp folder. use file id as name to reduce collisions
            lclDbPath = os.path.join(Temp_Dir + "\EventLogs", file.getName())
            ContentUtils.writeToFile(file, File(lclDbPath))

        # Example has only a Windows EXE, so bail if we aren't on Windows
        if not PlatformUtil.isWindowsOS():
            self.log(Level.INFO, "Ignoring data source. Not running on Windows")
            return IngestModule.ProcessResult.OK

        # Run the EXE, saving output to a sqlite database
        self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + " Parm 2 ==> " + Temp_Dir + "\EventLogs.db3")
        subprocess.Popen([self.path_to_exe, Temp_Dir + "\EventLogs", Temp_Dir + "\EventLogs.db3"]).communicate()[0]

        # Set the database to be read to the one created by the Event_EVTX program
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "EventLogs.db3")
        self.log(Level.INFO, "Path to the Eventlogs database file created ==> " + lclDbPath)

        # Open the DB using JDBC
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # files = []
        # fileManager = Case.getCurrentCase().getServices().getFileManager()
        # if self.List_Of_Events[0] == 'ALL':
        #     files = fileManager.findFiles(dataSource, "%.evtx")
        # else:
        #     for eventlog in self.List_Of_Events:
        #         file_name = fileManager.findFiles(dataSource, eventlog)
        #         files.extend(file_name)

        for file in files:
            file_name = file.getName()
            self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====")
            # Query the contacts table in the database and get all columns.
            if self.List_Of_Events[0] != 'ALL':
                try:
                    stmt = dbConn.createStatement()
                    # NOTE(review): SQL is built by string concatenation from
                    # file_name and self.Event_Id_List — not parameterized.
                    SQL_Statement = "SELECT File_Name, Recovered_Record, Computer_name, Event_Identifier, " + \
                                    " Event_Identifier_Qualifiers, Event_Level, Event_offset, Identifier, " + \
                                    " Event_source_Name, Event_User_Security_Identifier, Event_Time, " + \
                                    " Event_Time_Epoch, Event_Detail_Text FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \
                                    " and Event_Identifier in ('" + self.Event_Id_List + "');"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====")
                    resultSet = stmt.executeQuery(SQL_Statement)
                except SQLException as e:
                    # NOTE(review): returning here aborts all remaining
                    # event-log files and skips cleanup — confirm intended.
                    self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK

                # Cycle through each row and create artifacts
                while resultSet.next():
                    try:
                        #File_Name = resultSet.getString("File_Name")
                        #Recovered_Record = resultSet.getString("Recovered_Record")
                        Computer_Name = resultSet.getString("Computer_Name")
                        Event_Identifier = resultSet.getInt("Event_Identifier")
                        #Event_Identifier_Qualifiers = resultSet.getString("Event_Identifier_Qualifiers")
                        Event_Level = resultSet.getString("Event_Level")
                        #Event_Offset = resultSet.getString("Event_Offset")
                        #Identifier = resultSet.getString("Identifier")
                        Event_Source_Name = resultSet.getString("Event_Source_Name")
                        Event_User_Security_Identifier = resultSet.getString("Event_User_Security_Identifier")
                        Event_Time = resultSet.getString("Event_Time")
                        #Event_Time_Epoch = resultSet.getString("Event_Time_Epoch")
                        Event_Detail_Text = resultSet.getString("Event_Detail_Text")
                    except SQLException as e:
                        # NOTE(review): execution falls through and builds the
                        # artifact below even when this row failed to read.
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")

                    # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields
                    # Make artifact for TSK_EVTX_LOGS
                    art = file.newArtifact(artID_evtx)
                    art.addAttributes(((BlackboardAttribute(attID_ev_cn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Computer_Name)), \
                                       (BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \
                                       (BlackboardAttribute(attID_ev_el, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Level)), \
                                       (BlackboardAttribute(attID_ev_sn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Source_Name)), \
                                       (BlackboardAttribute(attID_ev_usi, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_User_Security_Identifier)), \
                                       (BlackboardAttribute(attID_ev_et, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time)), \
                                       (BlackboardAttribute(attID_ev_dt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Detail_Text))))
                    # These attributes may also be added in the future
                    #art.addAttribute(BlackboardAttribute(attID_ev_fn, ParseEvtxByEventIDIngestModuleFactory.moduleName, File_Name))
                    #art.addAttribute(BlackboardAttribute(attID_ev_rc, ParseEvtxByEventIDIngestModuleFactory.moduleName, Recovered_Record))
                    #art.addAttribute(BlackboardAttribute(attID_ev_eiq, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier_Qualifiers))
                    #art.addAttribute(BlackboardAttribute(attID_ev_oif, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Offset))
                    #art.addAttribute(BlackboardAttribute(attID_ev_id, ParseEvtxByEventIDIngestModuleFactory.moduleName, Identifier))
                    #art.addAttribute(BlackboardAttribute(attID_ev_ete, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time_Epoch))
            else:
                # 'ALL' selected: summarize events per id instead.
                try:
                    stmt_1 = dbConn.createStatement()
                    SQL_Statement_1 = "select event_identifier, file_name, count(*) 'Number_Of_Events' " + \
                                      " FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \
                                      " group by event_identifier, file_name order by 3;"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====")
                    resultSet_1 = stmt_1.executeQuery(SQL_Statement_1)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK
                self.log(Level.INFO, "This is the to see what the FU is")
                # Cycle through each row and create artifacts
                while resultSet_1.next():
                    try:
                        self.log(Level.INFO, "This is the to see what the FU is 2")
                        #File_Name = resultSet.getString("File_Name")
                        #Recovered_Record = resultSet.getString("Recovered_Record")
                        Event_Identifier = resultSet_1.getInt("Event_Identifier")
                        Event_ID_Count = resultSet_1.getInt("Number_Of_Events")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
                    self.log(Level.INFO, "This is the to see what the FU is 3")
                    # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields
                    # Make artifact for TSK_EVTX_LOGS
                    art_1 = file.newArtifact(artID_evtx_Long)
                    self.log(Level.INFO, "Type of Object is ==> " + str(type(Event_ID_Count)))
                    art_1.addAttributes(((BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \
                                         (BlackboardAttribute(attID_ev_cnt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_ID_Count))))

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_Long_evt, None))

        # Clean up
        try:
            if self.List_Of_Events[0] != 'ALL':
                stmt.close()
            else:
                stmt_1.close()
            dbConn.close()
            os.remove(lclDbPath)
        except:
            self.log(Level.INFO, "Error closing the statment, closing the database or removing the file")

        #Clean up EventLog directory and files
        # NOTE(review): extraction wrote under Temp_Dir + "\EventLogs", but
        # this removes Temp_Dir + "\\" + name and then rmdir's Temp_Dir (the
        # case temp dir) itself — paths look inconsistent; verify.
        for file in files:
            try:
                os.remove(Temp_Dir + "\\" + file.getName())
            except:
                self.log(Level.INFO, "removal of Event Log file failed " + Temp_Dir + "\\" + file.getName())
        try:
            os.rmdir(Temp_Dir)
        except:
            self.log(Level.INFO, "removal of Event Logs directory failed " + Temp_Dir)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))

        # After all databases, post a message to the ingest messages in box.
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " Event Logs have been parsed " )
        IngestServices.getInstance().postMessage(message)

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None))

    return IngestModule.ProcessResult.OK
def shutDown(self):
    """Called when the file ingest thread is done.

    Posts a summary message to the ingest inbox with the number of
    files this thread found (self.filesFound is accumulated during
    process()).  No value is returned.
    """
    # As a final part of this example, we'll send a message to the ingest
    # inbox with the number of files found (in this thread).
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA,
        SampleJythonFileIngestModuleFactory.moduleName,
        str(self.filesFound) + " files found")
    # postMessage() is void — the original assigned its (None) result to a
    # local, which was dead and misleading; just post the message.
    IngestServices.getInstance().postMessage(message)
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Alexa DB parser.

    Reads the bundled Alexa_DB.db3 configuration database (shipped next to
    this script) for a list of SQLite file names to look for in the data
    source.  Each matching file is extracted to the case temp directory,
    the configured SQL is run against it, and one blackboard artifact is
    created per result row, with one attribute per result column
    (attributes are named TSK_ALEXA_<COLUMN>).

    Returns IngestModule.ProcessResult.OK, or OK early if the config
    database cannot be opened/queried.
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()
    self.log(Level.INFO, "Starting 2 to process, Just before call to parse_safari_history")
    skCase = Case.getCurrentCase().getSleuthkitCase();
    # Config DB lives in the same directory as this module file.
    head, tail = os.path.split(os.path.abspath(__file__))
    settings_db = os.path.join(head, "Alexa_DB.db3")
    #Start to process based on version of OS
    # Open the configuration database via the SQLite JDBC driver.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % settings_db)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) Alexa_DB.db3 (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK
    # Query the database table for unique file names
    try:
        stmt = dbConn.createStatement()
        process_data_sql = "Select distinct file_name from alexa_databases"
        self.log(Level.INFO, process_data_sql)
        resultSet = stmt.executeQuery(process_data_sql)
        self.log(Level.INFO, "Query Database table for unique file names")
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for unique file names")
        return IngestModule.ProcessResult.OK
    # Process all the artifacts based on version of the OS
    # One iteration per distinct file name configured in alexa_databases.
    while resultSet.next():
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        files = fileManager.findFiles(dataSource, resultSet.getString("file_name"))
        numFiles = len(files)
        self.log(Level.INFO, "found " + str(numFiles) + " files for file_name ==> " + resultSet.getString("file_name"))
        progressBar.switchToDeterminate(numFiles)
        fileCount = 0;
        for file in files:
            # Open the DB using JDBC
            #lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), SQLite_DB)
            # Extract the matched file to the case temp dir; the file id is
            # appended to the name to reduce collisions.
            lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), file.getName() + "-" + str(file.getId()))
            ContentUtils.writeToFile(file, File(lclDbPath))
            #self.log(Level.INFO, "Path the prefetch database file created ==> " + lclDbPath)
            try:
                Class.forName("org.sqlite.JDBC").newInstance()
                dbConn_x = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
                self.log(Level.INFO, "Database ==> " + file.getName())
            except SQLException as e:
                # NOTE(review): failure to open the extracted DB is only
                # logged; execution continues and later statements will fail
                # against a stale/undefined dbConn_x — confirm this is intended.
                self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + "-" + str(file.getId()) + " (" + e.getMessage() + ")")
                #return IngestModule.ProcessResult.OK
            # Fetch the artifact names/descriptions and SQL to run for this
            # file name from the configuration database (dbConn, not dbConn_x).
            try:
                stmt_sql = dbConn.createStatement()
                process_stmt_sql = "select artifact_name, artifact_description, sql_to_run from alexa_databases where file_name = '" + resultSet.getString("file_name") + "';"
                self.log(Level.INFO, process_stmt_sql)
                resultSet_sql = stmt_sql.executeQuery(process_stmt_sql)
                self.log(Level.INFO, "Query Database table for sql statements")
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for sql_statements for file " + resultSet.getString("file_name"))
                # return IngestModule.ProcessResult.OK
            # Process all the artifacts based on version of the OS
            # One iteration per configured SQL statement for this file.
            while resultSet_sql.next():
                try:
                    stmt_1 = dbConn_x.createStatement()
                    sql_to_run = resultSet_sql.getString("sql_to_run")
                    self.log(Level.INFO, sql_to_run)
                    resultSet_3 = stmt_1.executeQuery(sql_to_run)
                    self.log(Level.INFO, "query " + sql_to_run)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for " + resultSet.getString("file_name"))
                    continue
                    # return IngestModule.ProcessResult.OK
                # Create the custom artifact type; addArtifactType raises if
                # it already exists, in which case it is simply looked up below.
                try:
                    #self.log(Level.INFO, "Begin Create New Artifacts")
                    artID_sql = skCase.addArtifactType(resultSet_sql.getString("artifact_name"), resultSet_sql.getString("artifact_description"))
                except:
                    self.log(Level.INFO, "Artifacts Creation Error, for artifact. ==> " + resultSet_sql.getString("artifact_name"))
                artID_hst = skCase.getArtifactTypeID(resultSet_sql.getString("artifact_name"))
                artID_hst_evt = skCase.getArtifactType(resultSet_sql.getString("artifact_name"))
                # Derive one custom attribute type per result-set column.
                meta = resultSet_3.getMetaData()
                columncount = meta.getColumnCount()
                column_names = []
                self.log(Level.INFO, "Number of Columns in the table ==> " + str(columncount))
                for x in range (1, columncount + 1):
                    self.log(Level.INFO, "Column Name ==> " + meta.getColumnLabel(x))
                    try:
                        attID_ex1 = skCase.addArtifactAttributeType("TSK_ALEXA_" + meta.getColumnLabel(x).upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, meta.getColumnLabel(x))
                    except:
                        self.log(Level.INFO, "Attributes Creation Error, " + "TSK_ALEXA_" + meta.getColumnLabel(x) + " ==> ")
                    column_names.append(meta.getColumnLabel(x))
                self.log(Level.INFO, "All Columns ==> " + str(column_names))
                # Cycle through each row and create artifacts
                while resultSet_3.next():
                    try:
                        #self.log(Level.INFO, SQL_String_1)
                        self.log(Level.INFO, "Artifact Is ==> " + str(artID_hst))
                        art = file.newArtifact(artID_hst)
                        self.log(Level.INFO, "Inserting attribute URL")
                        for col_name in column_names:
                            attID_ex1 = skCase.getAttributeType("TSK_ALEXA_" + col_name.upper())
                            self.log(Level.INFO, "Inserting attribute ==> " + str(attID_ex1))
                            self.log(Level.INFO, "Attribute Type ==> " + str(attID_ex1.getValueType()))
                            # Dispatch on the attribute's declared value type.
                            if attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes String Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Integer Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG:
                                # NOTE(review): LONG and DOUBLE branches both
                                # fetch with getInt() — confirm precision loss
                                # is acceptable for these value types.
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Long Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getInt(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Double Creation Error, " + col_name + " ==> ")
                            elif attID_ex1.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE:
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getString(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Byte Creation Error, " + col_name + " ==> ")
                            else:
                                # NOTE(review): java.sql.ResultSet has no
                                # getReal(); this fallback branch can only log
                                # the error it swallows — confirm intent.
                                try:
                                    art.addAttribute(BlackboardAttribute(attID_ex1, Alexa_DB_ParseIngestModuleFactory.moduleName, resultSet_3.getReal(col_name)))
                                except:
                                    self.log(Level.INFO, "Attributes Datatime Creation Error, " + col_name + " ==> ")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from sql statement ==> " + resultSet_sql.getString("artifact_name"))
                # Fire an event so the UI refreshes for this artifact type.
                IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(Alexa_DB_ParseIngestModuleFactory.moduleName, artID_hst_evt, None))
                stmt_1.close()
            stmt_sql.close()
            dbConn_x.close()
    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Mac OS Recent Artifacts", " Mac OS Recents Artifacts Have Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Volume Shadow Copy module.

    Runs the external VSS-extraction executable (self.path_to_exe_vss)
    against the image backing the data source, which writes extracted
    files under ModuleOutput/vss and a vss_extract_info.db3 database.
    Each shadow volume recorded there is then added back to the case as
    a new local-files data source, and per-file diff rows become
    TSK_VS_VOLUME_<n> blackboard artifacts.

    Returns IngestModule.ProcessResult.OK (or OK early on DB errors).
    """
    self.log(Level.INFO, "Starting to process, Just before call to parse_safari_history")
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()
    skCase = Case.getCurrentCase().getSleuthkitCase();
    self.log(Level.INFO, "Starting Processing of Image")
    # First path of the data source is taken as the image to process.
    image_names = dataSource.getPaths()
    self.log(Level.INFO, "Image names ==> " + str(image_names[0]))
    image_name = str(image_names[0])
    # Create VSS directory in ModuleOutput directory, if it exists then continue on processing
    Mod_Dir = Case.getCurrentCase().getModulesOutputDirAbsPath()
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    vss_output = os.path.join(Mod_Dir, "vss")
    try:
        os.mkdir(vss_output)
    except:
        self.log(Level.INFO, "Vss already exists " + Temp_Dir)
    lclDbPath = os.path.join(vss_output, "vss_extract_info.db3")
    vss_error_log = os.path.join(vss_output, "bad_files.log")
    # Run the Processing/Extraction process
    self.log(Level.INFO, "Running prog ==> " + self.path_to_exe_vss + " " + image_name + " " + lclDbPath + " " + vss_output + " " + vss_error_log)
    pipe = Popen([self.path_to_exe_vss, image_name, lclDbPath, vss_output, vss_error_log], stdout=PIPE, stderr=PIPE)
    # communicate() blocks until the extractor finishes.
    out_text = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + out_text)
    # Create the custom attribute types (addArtifactAttributeType raises
    # if the type already exists; they are looked up again below).
    try:
        attID_vs_fn = skCase.addArtifactAttributeType("TSK_VSS_MFT_NUMBER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "MFT Number")
    except:
        self.log(Level.INFO, "Attributes Creation Error, MFT Number. ==> ")
    try:
        attID_vs_ct = skCase.addArtifactAttributeType("TSK_VSS_DATETIME_CHANGED", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME, "Recovered Record")
    except:
        self.log(Level.INFO, "Attributes Creation Error, changed time. ==> ")
    try:
        attID_vs_sz = skCase.addArtifactAttributeType("TSK_VSS_FILE_SIZE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "File Size")
    except:
        self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ")
    # Open the database the extractor just produced.
    try:
        Class.forName("org.sqlite.JDBC").newInstance()
        dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
    except SQLException as e:
        self.log(Level.INFO, "Could not open database file (not SQLite) " +" (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK
    # One row per shadow volume; VOL_NAME is built from the vss identifier
    # and its creation time (Windows FILETIME converted to epoch).
    try:
        stmt = dbConn.createStatement()
        SQL_Statement = "select ' - '||vss_identifier||' - '||DATETIME((SUBSTR(vss_create_dttm,1,11)-11644473600),'UNIXEPOCH') 'VOL_NAME', " + \
                        " vss_num, volume_id, vss_identifier from vss_info;"
        self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====")
        resultSet = stmt.executeQuery(SQL_Statement)
    except SQLException as e:
        self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")")
        return IngestModule.ProcessResult.OK
    # Cycle through each row and create artifacts
    while resultSet.next():
        dir_list = []
        vss_identifier = resultSet.getString("vss_identifier")
        # Extractor writes volumes as vss0, vss1, ... so shift to 0-based.
        vss_num = int(resultSet.getString("vss_num")) - 1
        dir_list.append(vss_output + "\\vss" + str(vss_num))
        services = IngestServices.getInstance()
        progress_updater = ProgressUpdater()
        newDataSources = []
        # skCase = Case.getCurrentCase().getSleuthkitCase();
        fileManager = Case.getCurrentCase().getServices().getFileManager()
        skcase_data = Case.getCurrentCase()
        # Get a Unique device id using uuid
        device_id = UUID.randomUUID()
        self.log(Level.INFO, "device id: ==> " + str(device_id))
        skcase_data.notifyAddingDataSource(device_id)
        # Add data source with files
        newDataSource = fileManager.addLocalFilesDataSource(str(device_id), "vss" + str(vss_num) + resultSet.getString("VOL_NAME"), "", dir_list, progress_updater)
        newDataSources.append(newDataSource.getRootDirectory())
        # Get the files that were added
        files_added = progress_updater.getFiles()
        #self.log(Level.INFO, "Fire Module1: ==> " + str(files_added))
        for file_added in files_added:
            skcase_data.notifyDataSourceAdded(file_added, device_id)
            #self.log(Level.INFO, "Fire Module1: ==> " + str(file_added))
        #skcase.notifyDataSourceAdded(device_id)
        # Find the shadow-copy container file(s) in System Volume Information
        # whose names carry this vss identifier; artifacts attach to them.
        skCse = Case.getCurrentCase().getSleuthkitCase()
        vss_fileManager = Case.getCurrentCase().getServices().getFileManager()
        vss_files = fileManager.findFiles(dataSource, "%" + vss_identifier + "%", "System Volume Information")
        vss_numFiles = len(vss_files)
        #self.log(Level.INFO, "Number of VSS FIles is ==> " + str(vss_numFiles) + " <<= FIle Name is ++> " + str(vss_files))
        # NOTE(review): this loop body is a no-op ("name in '-slack'" is a
        # substring-of-constant test followed by pass) — confirm intent.
        for vs in vss_files:
            if vs.getName() in "-slack":
                pass
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_vss = skCase.addArtifactType( "TSK_VS_VOLUME_" + str(vss_num), "vss" + str(vss_num) + resultSet.getString("VOL_NAME") + " Files")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
            artID_vss = skCase.getArtifactTypeID("TSK_VS_VOLUME_" + str(vss_num))
        # Look up the artifact and attribute types to use below.
        artID_vss = skCase.getArtifactTypeID("TSK_VS_VOLUME_" + str(vss_num))
        artID_vss_evt = skCase.getArtifactType("TSK_VS_VOLUME_" + str(vss_num))
        attID_vs_fn = skCase.getAttributeType("TSK_VSS_MFT_NUMBER")
        attID_vs_ct = skCase.getAttributeType("TSK_VSS_DATETIME_CHANGED")
        attID_vs_sz = skCase.getAttributeType("TSK_VSS_FILE_SIZE")
        attID_vs_nm = skCase.getAttributeType("TSK_NAME")
        attID_vs_pa = skCase.getAttributeType("TSK_PATH")
        attID_vs_md = skCase.getAttributeType("TSK_DATETIME_MODIFIED")
        attID_vs_ad = skCase.getAttributeType("TSK_DATETIME_ACCESSED")
        attID_vs_cr = skCase.getAttributeType("TSK_DATETIME_CREATED")
        for vs_file in vss_files:
            # Skip slack-space pseudo files.
            if "-slack" in vs_file.getName():
                pass
            else:
                self.log(Level.INFO, "VSS FIles is ==> " + str(vs_file))
                # NOTE(review): table name vss1_diff is hard-coded and does
                # not vary with vss_num — confirm against the extractor schema.
                try:
                    stmt_1 = dbConn.createStatement()
                    SQL_Statement_1 = "select file_name, inode, directory, ctime, mtime, atime, crtime, size " + \
                                      " from vss1_diff where lower(f_type) <> 'dir';"
                    self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====")
                    resultSet_1 = stmt_1.executeQuery(SQL_Statement_1)
                except SQLException as e:
                    self.log(Level.INFO, "Error querying database for vss diff tables (" + e.getMessage() + ")")
                    return IngestModule.ProcessResult.OK
                # Cycle through each row and create artifacts
                while resultSet_1.next():
                    try:
                        File_Name = resultSet_1.getString("file_name")
                        Path_Name = resultSet_1.getString("directory")
                        MFT_Number = resultSet_1.getString("inode")
                        Ctime = resultSet_1.getInt("ctime")
                        Mtime = resultSet_1.getInt("mtime")
                        Atime = resultSet_1.getInt("atime")
                        Crtime = resultSet_1.getInt("crtime")
                        File_Size = resultSet_1.getInt("size")
                    except SQLException as e:
                        self.log(Level.INFO, "Error getting values from vss diff table (" + e.getMessage() + ")")
                    # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields
                    # Make artifact for TSK_EVTX_LOGS
                    art = vs_file.newArtifact(artID_vss)
                    art.addAttributes(((BlackboardAttribute(attID_vs_nm, VSSIngesttModuleFactory.moduleName, File_Name)), \
                                       (BlackboardAttribute(attID_vs_fn, VSSIngesttModuleFactory.moduleName, MFT_Number)), \
                                       (BlackboardAttribute(attID_vs_pa, VSSIngesttModuleFactory.moduleName, Path_Name)), \
                                       (BlackboardAttribute(attID_vs_cr, VSSIngesttModuleFactory.moduleName, Crtime)), \
                                       (BlackboardAttribute(attID_vs_md, VSSIngesttModuleFactory.moduleName, Mtime)), \
                                       (BlackboardAttribute(attID_vs_ad, VSSIngesttModuleFactory.moduleName, Atime)), \
                                       (BlackboardAttribute(attID_vs_ct, VSSIngesttModuleFactory.moduleName, Ctime)), \
                                       (BlackboardAttribute(attID_vs_sz, VSSIngesttModuleFactory.moduleName, File_Size))))
    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(VSSIngesttModuleFactory.moduleName, artID_vss_evt, None))
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Process/Extract VS", " Volume Shadow has been analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Data-source ingest entry point for the Amcache parser.

    Extracts Amcache.hve from the data source, runs the external parser
    (self.path_to_exe) to convert it to a SQLite database, then — for each
    table the user selected (self.List_Of_tables) — creates a custom
    artifact type per table and an attribute type per column, and posts
    one artifact per row.

    Returns ProcessResult.ERROR when no tables were selected, otherwise
    ProcessResult.OK (or OK early on cancel/DB errors).
    """
    # Bail out with an inbox message if the user selected no tables.
    if len(self.List_Of_tables) < 1:
        message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseAmcache", " No Amcache tables Selected to Parse " )
        IngestServices.getInstance().postMessage(message)
        return IngestModule.ProcessResult.ERROR
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()
    # Set the database to be read to the once created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "Amcache.hve")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;
    # Create Event Log directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "amcache")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Amcache Directory already exists " + temp_dir)
    # Write out each Event Log file to the temp directory
    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1
        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        lclDbPath = os.path.join(temp_dir, file.getName())
        ContentUtils.writeToFile(file, File(lclDbPath))
    # Example has only a Windows EXE, so bail if we aren't on Windows
    # Run the EXE, saving output to a sqlite database
    self.log(Level.INFO, "Running program on data source parm 1 ==> " + Temp_Dir + "\Amcache\Amcache.hve Parm 2 ==> " + Temp_Dir + "\Amcache.db3")
    # communicate() waits for the external parser to finish.
    subprocess.Popen([self.path_to_exe, os.path.join(temp_dir, "Amcache.hve"), os.path.join(temp_dir, "Amcache.db3")]).communicate()[0]
    for file in files:
        # Open the DB using JDBC
        lclDbPath = os.path.join(temp_dir, "Amcache.db3")
        self.log(Level.INFO, "Path the Amcache database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK
        # Query the contacts table in the database and get all columns.
        for am_table_name in self.List_Of_tables:
            # Verify the selected table actually exists in the parsed DB.
            try:
                stmt = dbConn.createStatement()
                resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('" + am_table_name + "'); ")
                # resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER where lower(tbl_name) in ('associated_file_entries', " + \
                # "'unassociated_programs', 'program_entries'); ")
                self.log(Level.INFO, "query SQLite Master table for " + am_table_name)
            except SQLException as e:
                self.log(Level.INFO, "Error querying database for Prefetch table (" + e.getMessage() + ")")
                return IngestModule.ProcessResult.OK
            # Cycle through each row and create artifacts
            while resultSet.next():
                try:
                    self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                    table_name = resultSet.getString("tbl_name")
                    #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                    SQL_String_1 = "Select * from " + table_name + ";"
                    SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                    artifact_name = "TSK_" + table_name.upper()
                    artifact_desc = "Amcache " + table_name.upper()
                    #self.log(Level.INFO, SQL_String_1)
                    #self.log(Level.INFO, "Artifact_Name ==> " + artifact_name)
                    #self.log(Level.INFO, "Artifact_desc ==> " + artifact_desc)
                    #self.log(Level.INFO, SQL_String_2)
                    # addArtifactType raises if the type exists; looked up below.
                    try:
                        self.log(Level.INFO, "Begin Create New Artifacts")
                        artID_amc = skCase.addArtifactType( artifact_name, artifact_desc)
                    except:
                        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")
                    artID_amc = skCase.getArtifactTypeID(artifact_name)
                    artID_amc_evt = skCase.getArtifactType(artifact_name)
                    # Discover the table's columns and create one attribute
                    # type per column, typed from the SQLite declared type.
                    Column_Names = []
                    Column_Types = []
                    resultSet2 = stmt.executeQuery(SQL_String_2)
                    while resultSet2.next():
                        Column_Names.append(resultSet2.getString("name").upper())
                        Column_Types.append(resultSet2.getString("type").upper())
                        #self.log(Level.INFO, "Add Attribute TSK_" + resultSet2.getString("name").upper() + " ==> " + resultSet2.getString("type"))
                        #self.log(Level.INFO, "Add Attribute TSK_" + resultSet2.getString("name").upper() + " ==> " + resultSet2.getString("name"))
                        #attID_ex1 = skCase.addAttrType("TSK_" + resultSet2.getString("name").upper(), resultSet2.getString("name"))
                        #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        if resultSet2.getString("type").upper() == "TEXT":
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                        elif resultSet2.getString("type").upper() == "":
                            # Untyped columns are treated as strings.
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                                #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                        else:
                            # Everything else (INTEGER etc.) becomes LONG.
                            try:
                                attID_ex1 = skCase.addArtifactAttributeType("TSK_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                                #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                            except:
                                self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    # One artifact per data row, one attribute per column.
                    resultSet3 = stmt.executeQuery(SQL_String_1)
                    while resultSet3.next():
                        art = file.newArtifact(artID_amc)
                        Column_Number = 1
                        for col_name in Column_Names:
                            #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                            #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                            #self.log(Level.INFO, "Result get information for column type " + Column_Types[Column_Number - 1] + " <== ")
                            c_name = "TSK_" + col_name
                            #self.log(Level.INFO, "Attribute Name is " + c_name + " ")
                            attID_ex1 = skCase.getAttributeType(c_name)
                            if Column_Types[Column_Number - 1] == "TEXT":
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                                # art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            elif Column_Types[Column_Number - 1] == "":
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                            # elif Column_Types[Column_Number - 1] == "BLOB":
                            #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, "BLOBS Not Supported"))
                            # elif Column_Types[Column_Number - 1] == "REAL":
                            #     art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, resultSet3.getFloat(Column_Number)))
                            else:
                                #self.log(Level.INFO, "Value for column type ==> " + str(resultSet3.getInt(Column_Number)) + " <== ")
                                art.addAttribute(BlackboardAttribute(attID_ex1, ParseAmcacheIngestModuleFactory.moduleName, long(resultSet3.getInt(Column_Number))))
                            Column_Number = Column_Number + 1
                    # Fire an event so the UI refreshes for this artifact type.
                    IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(ParseAmcacheIngestModuleFactory.moduleName, artID_amc_evt, None))
                except SQLException as e:
                    self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
        # Clean up
        stmt.close()
        dbConn.close()
        #os.remove(lclDbPath)
    #Clean up EventLog directory and files
    # NOTE(review): removals use Temp_Dir (case temp root) while the files
    # were written under temp_dir (…/amcache) — confirm the intended paths.
    for file in files:
        try:
            os.remove(Temp_Dir + "\\" + file.getName())
        except:
            self.log(Level.INFO, "removal of Amcache file failed " + Temp_Dir + "\\" + file.getName())
    try:
        os.rmdir(Temp_Dir)
    except:
        self.log(Level.INFO, "removal of Amcache directory failed " + Temp_Dir)
    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Amcache Parser", " Amcache Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar): #Check to see if event logs were selected, if not then send message and error out else process events selected self.log(Level.INFO, "List Of Events ==> " + str(self.List_Of_Events) + " <== Number of Events ==> " + str(len(self.List_Of_Events))) if len(self.List_Of_Events) < 1: message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " No Event Logs Selected to Parse " ) IngestServices.getInstance().postMessage(message) return IngestModule.ProcessResult.ERROR else: # Check to see if the artifacts exist and if not then create it, also check to see if the attributes # exist and if not then create them skCase = Case.getCurrentCase().getSleuthkitCase(); skCase_Tran = skCase.beginTransaction() try: self.log(Level.INFO, "Begin Create New Artifacts") artID_evtx = skCase.addArtifactType( "TSK_EVTX_LOGS", "Windows Event Logs") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS") try: self.log(Level.INFO, "Begin Create New Artifacts") artID_evtx_Long = skCase.addArtifactType( "TSK_EVTX_LOGS_BY_ID", "Windows Event Logs By Event Id") except: self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ") artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID") try: attID_ev_fn = skCase.addArtifactAttributeType("TSK_EVTX_FILE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Log File Name") except: self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ") try: attID_ev_rc = skCase.addArtifactAttributeType("TSK_EVTX_RECOVERED_RECORD", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Recovered Record") except: self.log(Level.INFO, "Attributes Creation Error, Recovered Record. 
==> ") try: attID_ev_cn = skCase.addArtifactAttributeType("TSK_EVTX_COMPUTER_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Computer Name") except: self.log(Level.INFO, "Attributes Creation Error, Computer Name. ==> ") try: attID_ev_ei = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Identiifier") except: self.log(Level.INFO, "Attributes Creation Error, Event Log File Name. ==> ") try: attID_ev_eiq = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Identifier Qualifiers") except: self.log(Level.INFO, "Attributes Creation Error, Event Identifier Qualifiers. ==> ") try: attID_ev_el = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_LEVEL", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Level") except: self.log(Level.INFO, "Attributes Creation Error, Event Level. ==> ") try: attID_ev_oif = skCase.addArtifactAttributeType("TSK_EVTX_OFFSET_IN_FILE", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Offset In File") except: self.log(Level.INFO, "Attributes Creation Error, Event Offset In File. ==> ") try: attID_ev_id = skCase.addArtifactAttributeType("TSK_EVTX_IDENTIFIER", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Identifier") except: self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ") try: attID_ev_sn = skCase.addArtifactAttributeType("TSK_EVTX_SOURCE_NAME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Source Name") except: self.log(Level.INFO, "Attributes Creation Error, Source Name. ==> ") try: attID_ev_usi = skCase.addArtifactAttributeType("TSK_EVTX_USER_SECURITY_ID", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "User Security ID") except: self.log(Level.INFO, "Attributes Creation Error, User Security ID. 
==> ") try: attID_ev_et = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time") except: self.log(Level.INFO, "Attributes Creation Error, Event Time. ==> ") try: attID_ev_ete = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_TIME_EPOCH", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Time Epoch") except: self.log(Level.INFO, "Attributes Creation Error, Identifier. ==> ") try: attID_ev_dt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Event Detail") except: self.log(Level.INFO, "Attributes Creation Error, Event Detail. ==> ") try: attID_ev_cnt = skCase.addArtifactAttributeType("TSK_EVTX_EVENT_ID_COUNT", BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, "Event Id Count") except: self.log(Level.INFO, "Attributes Creation Error, Event ID Count. ==> ") #self.log(Level.INFO, "Get Artifacts after they were created.") # Get the new artifacts and attributes that were just created artID_evtx = skCase.getArtifactTypeID("TSK_EVTX_LOGS") artID_evtx_evt = skCase.getArtifactType("TSK_EVTX_LOGS") artID_evtx_Long = skCase.getArtifactTypeID("TSK_EVTX_LOGS_BY_ID") artID_evtx_Long_evt = skCase.getArtifactType("TSK_EVTX_LOGS_BY_ID") attID_ev_fn = skCase.getAttributeType("TSK_EVTX_FILE_NAME") attID_ev_rc = skCase.getAttributeType("TSK_EVTX_RECOVERED_RECORD") attID_ev_cn = skCase.getAttributeType("TSK_EVTX_COMPUTER_NAME") attID_ev_ei = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER") attID_ev_eiq = skCase.getAttributeType("TSK_EVTX_EVENT_IDENTIFIER_QUALIFERS") attID_ev_el = skCase.getAttributeType("TSK_EVTX_EVENT_LEVEL") attID_ev_oif = skCase.getAttributeType("TSK_EVTX_OFFSET_IN_FILE") attID_ev_id = skCase.getAttributeType("TSK_EVTX_IDENTIFIER") attID_ev_sn = skCase.getAttributeType("TSK_EVTX_SOURCE_NAME") attID_ev_usi = skCase.getAttributeType("TSK_EVTX_USER_SECURITY_ID") attID_ev_et = 
skCase.getAttributeType("TSK_EVTX_EVENT_TIME") attID_ev_ete = skCase.getAttributeType("TSK_EVTX_EVENT_TIME_EPOCH") attID_ev_dt = skCase.getAttributeType("TSK_EVTX_EVENT_DETAIL_TEXT") attID_ev_cnt = skCase.getAttributeType("TSK_EVTX_EVENT_ID_COUNT") # we don't know how much work there is yet progressBar.switchToIndeterminate() # Find the Windows Event Log Files files = [] fileManager = Case.getCurrentCase().getServices().getFileManager() files = fileManager.findFiles(dataSource, "%.evtx") # if self.List_Of_Events[0] == 'ALL': # files = fileManager.findFiles(dataSource, "%.evtx") # else: # for eventlog in self.List_Of_Events: # file_name = fileManager.findFiles(dataSource, eventlog) # files.extend(file_name) #self.log(Level.INFO, "found " + str(file_name) + " files") #self.log(Level.INFO, "found " + str(files) + " files") numFiles = len(files) self.log(Level.INFO, "found " + str(numFiles) + " files") progressBar.switchToDeterminate(numFiles) fileCount = 0; # Create Event Log directory in temp directory, if it exists then continue on processing Temp_Dir = Case.getCurrentCase().getTempDirectory() self.log(Level.INFO, "create Directory " + Temp_Dir) temp_dir = os.path.join(Temp_Dir, "EventLogs") try: os.mkdir(temp_dir) except: self.log(Level.INFO, "Event Log Directory already exists " + temp_dir) # Write out each Event Log file to the temp directory for file in files: # Check if the user pressed cancel while we were busy if self.context.isJobCancelled(): return IngestModule.ProcessResult.OK #self.log(Level.INFO, "Processing file: " + file.getName()) fileCount += 1 # Save the DB locally in the temp folder. 
use file id as name to reduce collisions lclDbPath = os.path.join(temp_dir, file.getName()) ContentUtils.writeToFile(file, File(lclDbPath)) # Run the EXE, saving output to a sqlite database self.log(Level.INFO, "Running program on data source " + self.path_to_exe + " parm 1 ==> " + temp_dir + " Parm 2 ==> " + os.path.join(Temp_Dir,"\EventLogs.db3")) subprocess.Popen([self.path_to_exe, temp_dir, os.path.join(Temp_Dir, "EventLogs.db3")]).communicate()[0] # Set the database to be read to the one created by the Event_EVTX program lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), "EventLogs.db3") self.log(Level.INFO, "Path to the Eventlogs database file created ==> " + lclDbPath) # Open the DB using JDBC try: Class.forName("org.sqlite.JDBC").newInstance() dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath) except SQLException as e: self.log(Level.INFO, "Could not open database file (not SQLite) " + file.getName() + " (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # files = [] # fileManager = Case.getCurrentCase().getServices().getFileManager() # if self.List_Of_Events[0] == 'ALL': # files = fileManager.findFiles(dataSource, "%.evtx") # else: # for eventlog in self.List_Of_Events: # file_name = fileManager.findFiles(dataSource, eventlog) # files.extend(file_name) for file in files: file_name = file.getName() self.log(Level.INFO, "File To process in SQL " + file_name + " <<=====") # Query the contacts table in the database and get all columns. 
if self.List_Of_Events[0] != 'ALL': try: stmt = dbConn.createStatement() SQL_Statement = "SELECT File_Name, Recovered_Record, Computer_name, Event_Identifier, " + \ " Event_Identifier_Qualifiers, Event_Level, Event_offset, Identifier, " + \ " Event_source_Name, Event_User_Security_Identifier, Event_Time, " + \ " Event_Time_Epoch, Event_Detail_Text FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \ " and Event_Identifier in ('" + self.Event_Id_List + "');" self.log(Level.INFO, "SQL Statement " + SQL_Statement + " <<=====") resultSet = stmt.executeQuery(SQL_Statement) except SQLException as e: self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK # Cycle through each row and create artifacts while resultSet.next(): try: #File_Name = resultSet.getString("File_Name") #Recovered_Record = resultSet.getString("Recovered_Record") Computer_Name = resultSet.getString("Computer_Name") Event_Identifier = resultSet.getInt("Event_Identifier") #Event_Identifier_Qualifiers = resultSet.getString("Event_Identifier_Qualifiers") Event_Level = resultSet.getString("Event_Level") #Event_Offset = resultSet.getString("Event_Offset") #Identifier = resultSet.getString("Identifier") Event_Source_Name = resultSet.getString("Event_Source_Name") Event_User_Security_Identifier = resultSet.getString("Event_User_Security_Identifier") Event_Time = resultSet.getString("Event_Time") #Event_Time_Epoch = resultSet.getString("Event_Time_Epoch") Event_Detail_Text = resultSet.getString("Event_Detail_Text") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields # Make artifact for TSK_EVTX_LOGS art = file.newArtifact(artID_evtx) art.addAttributes(((BlackboardAttribute(attID_ev_cn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Computer_Name)), \ 
(BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \ (BlackboardAttribute(attID_ev_el, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Level)), \ (BlackboardAttribute(attID_ev_sn, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Source_Name)), \ (BlackboardAttribute(attID_ev_usi, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_User_Security_Identifier)), \ (BlackboardAttribute(attID_ev_et, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time)), \ (BlackboardAttribute(attID_ev_dt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Detail_Text)))) # These attributes may also be added in the future #art.addAttribute(BlackboardAttribute(attID_ev_fn, ParseEvtxByEventIDIngestModuleFactory.moduleName, File_Name)) #art.addAttribute(BlackboardAttribute(attID_ev_rc, ParseEvtxByEventIDIngestModuleFactory.moduleName, Recovered_Record)) #art.addAttribute(BlackboardAttribute(attID_ev_eiq, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier_Qualifiers)) #art.addAttribute(BlackboardAttribute(attID_ev_oif, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Offset)) #art.addAttribute(BlackboardAttribute(attID_ev_id, ParseEvtxByEventIDIngestModuleFactory.moduleName, Identifier)) #art.addAttribute(BlackboardAttribute(attID_ev_ete, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Time_Epoch)) else: try: stmt_1 = dbConn.createStatement() SQL_Statement_1 = "select event_identifier, file_name, count(*) 'Number_Of_Events' " + \ " FROM Event_Logs where upper(File_Name) = upper('" + file_name + "')" + \ " group by event_identifier, file_name order by 3;" self.log(Level.INFO, "SQL Statement " + SQL_Statement_1 + " <<=====") resultSet_1 = stmt_1.executeQuery(SQL_Statement_1) except SQLException as e: self.log(Level.INFO, "Error querying database for EventLogs table (" + e.getMessage() + ")") return IngestModule.ProcessResult.OK self.log(Level.INFO, "This is the to see what the FU 
is") # Cycle through each row and create artifacts while resultSet_1.next(): try: self.log(Level.INFO, "This is the to see what the FU is 2") #File_Name = resultSet.getString("File_Name") #Recovered_Record = resultSet.getString("Recovered_Record") Event_Identifier = resultSet_1.getInt("Event_Identifier") Event_ID_Count = resultSet_1.getInt("Number_Of_Events") except SQLException as e: self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")") self.log(Level.INFO, "This is the to see what the FU is 3") # Make an artifact on the blackboard, TSK_PROG_RUN and give it attributes for each of the fields # Make artifact for TSK_EVTX_LOGS art_1 = file.newArtifact(artID_evtx_Long) self.log(Level.INFO, "Type of Object is ==> " + str(type(Event_ID_Count))) art_1.addAttributes(((BlackboardAttribute(attID_ev_ei, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_Identifier)), \ (BlackboardAttribute(attID_ev_cnt, ParseEvtxByEventIDIngestModuleFactory.moduleName, Event_ID_Count)))) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None)) IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_Long_evt, None)) # Clean up try: if self.List_Of_Events[0] != 'ALL': stmt.close() else: stmt_1.close() dbConn.close() os.remove(lclDbPath) except: self.log(Level.INFO, "Error closing the statment, closing the database or removing the file") #Clean up EventLog directory and files for file in files: try: os.remove(os.path.join(temp_dir,file.getName())) except: self.log(Level.INFO, "removal of Event Log file failed " + Temp_Dir + "\\" + file.getName()) try: os.rmdir(temp_dir) except: self.log(Level.INFO, "removal of Event Logs directory failed " + Temp_Dir) # Fire an event to notify the UI and others that there are new artifacts 
IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None)) # After all databases, post a message to the ingest messages in box. message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "ParseEvtx", " Event Logs have been parsed " ) IngestServices.getInstance().postMessage(message) # Fire an event to notify the UI and others that there are new artifacts IngestServices.getInstance().fireModuleDataEvent( ModuleDataEvent(ParseEvtxByEventIDIngestModuleFactory.moduleName, artID_evtx_evt, None)) return IngestModule.ProcessResult.OK
def process(self, dataSource, progressBar):
    """Ingest-module entry point for the BAM-key module.

    Extracts the SAM/SYSTEM registry hives (plus their transaction logs)
    from the data source into a temp directory, runs an external parser
    executable that writes a `bam.csv` report, then posts each CSV data row
    as a TSK_BAM_KEY blackboard artifact attached to the SYSTEM hive file.

    Parameters:
        dataSource:  the Autopsy Content object being ingested.
        progressBar: DataSourceIngestModuleProgress for UI progress updates.

    Returns:
        IngestModule.ProcessResult.OK (also returned early on job cancel).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Registry hive files (and their .LOG transaction files) to pull out of
    # Windows/System32/Config for the external parser.
    filesToExtract = ("SAM", "SAM.LOG1", "SAM.LOG2", "SYSTEM", "SYSTEM.LOG1", "SYSTEM.LOG2")

    # Set the database to be read to the one created by the prefetch parser program
    skCase = Case.getCurrentCase().getSleuthkitCase()
    fileManager = Case.getCurrentCase().getServices().getFileManager()

    # Create BAM directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    temp_dir = os.path.join(Temp_Dir, "bam")
    self.log(Level.INFO, "create Directory " + temp_dir)
    try:
        os.mkdir(temp_dir)
    except:
        # mkdir raises if the directory already exists; treated as benign here.
        self.log(Level.INFO, "bam Directory already exists " + temp_dir)

    # Will be replaced by the SYSTEM hive AbstractFile once found; artifacts
    # are later attached to it.
    # NOTE(review): if no SYSTEM hive is matched below, this stays a plain
    # list and systemAbsFile.newArtifact(...) in the CSV loop would raise —
    # confirm whether that case can occur in practice.
    systemAbsFile = []
    for fileName in filesToExtract:
        files = fileManager.findFiles(dataSource, fileName, "Windows/System32/Config")
        numFiles = len(files)
        #self.log(Level.INFO, "Number of SAM Files found ==> " + str(numFiles))

        for file in files:
            # Check if the user pressed cancel while we were busy
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK

            #self.log(Level.INFO, "Parent Path ==> " + str(file.getParentPath()))
            # Exact-path filter: findFiles may match same-named files in other
            # directories (note the lowercase 'config' here vs 'Config' above —
            # presumably path comparison is against a normalized parent path;
            # TODO confirm case handling).
            if file.getParentPath() == '/Windows/System32/config/':
                # Save the file locally in the temp folder; name collisions are
                # possible since the plain file name is used.
                lclDbPath = os.path.join(temp_dir, file.getName())
                ContentUtils.writeToFile(file, File(lclDbPath))
                if file.getName() == 'SYSTEM':
                    # Remember the SYSTEM hive; artifacts are attached to it below.
                    systemAbsFile = file
            else:
                self.log(
                    Level.INFO, "Skipping File " + file.getName() + " In Path " +
                    file.getParentPath())

    # Run the EXE, saving output to a sqlite database
    # (per the arguments, the parser is given the hive directory and writes
    # its report to bam.csv in that same directory).
    self.log(
        Level.INFO, "Running program on " + self.pathToExe + temp_dir + " " +
        os.path.join(temp_dir, 'bam.csv'))
    pipe = Popen(
        [self.pathToExe, temp_dir,
         os.path.join(temp_dir, "bam.csv")],
        stdout=PIPE,
        stderr=PIPE)
    outText = pipe.communicate()[0]
    self.log(Level.INFO, "Output from run is ==> " + outText)

    # Setup Artifact type; addArtifactType raises if the type already exists
    # (e.g. on re-ingest), which is why the failure is logged and ignored.
    try:
        self.log(Level.INFO, "Begin Create New Artifacts")
        artID_ls = skCase.addArtifactType("TSK_BAM_KEY", "BAM Registry Key")
    except:
        self.log(
            Level.INFO,
            "Artifacts Creation Error, some artifacts may not exist now. ==> "
        )
    artifactName = "TSK_BAM_KEY"
    artIdCsv = skCase.getArtifactTypeID(artifactName)

    # Read CSV File and Import into Autopsy.
    # First row is treated as the header: column names (uppercased) are taken
    # as blackboard attribute type names. Each later row becomes one artifact.
    headingRead = False
    attributeNames = []
    # 'rU' = Python 2 universal-newline mode (this runs under Jython 2.x).
    with open(os.path.join(temp_dir, 'bam.csv'), 'rU') as csvfile:
        csvreader = csv.reader(csvfile, delimiter=',', quotechar='|')
        for row in csvreader:
            if not headingRead:
                for colName in row:
                    attributeNames.append(colName.upper().strip())
                headingRead = True
            else:
                art = systemAbsFile.newArtifact(artIdCsv)
                for (data, head) in zip(row, attributeNames):
                    # First try the value as a string; on failure retry as int —
                    # presumably the attribute type for that column is numeric
                    # (LONG), so the string overload raises. TODO confirm which
                    # exception is actually thrown here.
                    try:
                        art.addAttribute(
                            BlackboardAttribute(
                                skCase.getAttributeType(head),
                                BamKeyIngestModuleFactory.moduleName, data))
                    except:
                        art.addAttribute(
                            BlackboardAttribute(
                                skCase.getAttributeType(head),
                                BamKeyIngestModuleFactory.moduleName, int(data)))

    # Clean up the temp directory and extracted files; best-effort only.
    try:
        shutil.rmtree(temp_dir)
    except:
        self.log(Level.INFO, "removal of directory tree failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(
        IngestMessage.MessageType.DATA, "BamKey",
        " BamKey Files Have Been Analyzed ")
    IngestServices.getInstance().postMessage(message)

    return IngestModule.ProcessResult.OK