def ProcessAdvancedModules(self, configuration):
    """Run every registered advanced analysis module against each source.

    For each source path spec that is a file system (or a raw OS path),
    resolves the partition identifier and invokes ``Analyze`` on every
    registered ``AdvancedModuleAnalyzer``, bracketing each run with
    ``print_run_info`` progress output.

    Args:
        configuration: processing configuration; must expose
            ``source_path_specs`` (iterable of dfVFS path specs).

    Raises:
        errors.BackEndError: if a module raises RuntimeError.
    """
    for source_path_spec in configuration.source_path_specs:
        if source_path_spec.IsFileSystem() \
                or source_path_spec.type_indicator == dfvfs_definitions.TYPE_INDICATOR_OS:
            try:
                par_id = self.get_partition_id(source_path_spec, configuration)
                for advanced_module_name in self._advanced_modules:
                    advanced_module = self._advanced_modules.get(
                        advanced_module_name, None)
                    # isinstance also filters out a None returned by .get().
                    if isinstance(
                            advanced_module,
                            advanced_modules_interface.AdvancedModuleAnalyzer):
                        advanced_module.print_run_info(
                            advanced_module.DESCRIPTION, start=True)
                        advanced_module.Analyze(
                            par_id=par_id,
                            configuration=configuration,
                            source_path_spec=source_path_spec,
                            knowledge_base=self.knowledge_base)
                        advanced_module.print_run_info(
                            advanced_module.DESCRIPTION, start=False)
            except RuntimeError as exception:
                # Chain the original exception so the root cause survives.
                raise errors.BackEndError(
                    ('The module cannot be connected: {0!s}'
                     ).format(exception)) from exception
def Process(self, configuration):
    """Connect every registered module to each file-system source.

    Volume shadow copy (VSHADOW) snapshots are skipped. The
    ``andforensics_connector`` module is connected at most once across all
    sources (tracked via ``and_flag``); every other ``ModuleConnector``
    is connected once per source.

    Args:
        configuration: processing configuration; must expose
            ``source_path_specs`` (iterable of dfVFS path specs).

    Raises:
        errors.BackEndError: if a module raises RuntimeError.
    """
    and_flag = False  # andforensics_connector must run at most once.
    for source_path_spec in configuration.source_path_specs:
        # Skip volume shadow copy snapshots.
        if source_path_spec.parent.TYPE_INDICATOR == 'VSHADOW':
            continue
        if source_path_spec.IsFileSystem():
            try:
                for module_name in self._modules:
                    module = self._modules.get(module_name, None)
                    if isinstance(module, modules_interface.ModuleConnector):
                        if module_name == 'andforensics_connector':
                            if not and_flag:
                                module.Connect(
                                    configuration=configuration,
                                    source_path_spec=source_path_spec,
                                    knowledge_base=self.knowledge_base)
                                and_flag = True
                        else:
                            module.Connect(
                                configuration=configuration,
                                source_path_spec=source_path_spec,
                                knowledge_base=self.knowledge_base)
            except RuntimeError as exception:
                # Chain the original exception so the root cause survives.
                raise errors.BackEndError(
                    ('The module cannot be connected: {0!s}'
                     ).format(exception)) from exception
def ProcessAdvancedModules(self, configuration):
    """Run every registered advanced analysis module against each source.

    Volume shadow copy (VSHADOW) snapshots are skipped. Each registered
    ``AdvancedModuleAnalyzer`` is invoked once per file-system source.

    Args:
        configuration: processing configuration; must expose
            ``source_path_specs`` (iterable of dfVFS path specs).

    Raises:
        errors.BackEndError: if a module raises RuntimeError.
    """
    for source_path_spec in configuration.source_path_specs:
        # Skip volume shadow copy snapshots.
        if source_path_spec.parent.TYPE_INDICATOR == 'VSHADOW':
            continue
        if source_path_spec.IsFileSystem():
            try:
                for advanced_module_name in self._advanced_modules:
                    advanced_module = self._advanced_modules.get(
                        advanced_module_name, None)
                    # isinstance also filters out a None returned by .get().
                    if isinstance(
                            advanced_module,
                            advanced_modules_interface.AdvancedModuleAnalyzer):
                        advanced_module.Analyze(
                            configuration=configuration,
                            source_path_spec=source_path_spec)
            except RuntimeError as exception:
                # Chain the original exception so the root cause survives.
                raise errors.BackEndError(
                    ('The module cannot be connected: {0!s}'
                     ).format(exception)) from exception
def Process(self, configuration):
    """Connect modules to the sources, with a dedicated APFS fast path.

    If the first source is APFS, only ``macos_connector`` is run (against
    that first source). Otherwise each file-system (or raw OS) source is
    processed by every registered ``ModuleConnector`` except the ones in
    the explicit skip list; ``andforensics_connector`` runs at most once.

    Args:
        configuration: processing configuration; must expose
            ``source_path_specs`` (iterable of dfVFS path specs).

    Raises:
        errors.BackEndError: if a module raises RuntimeError.
    """
    and_flag = False  # andforensics_connector must run at most once.
    if configuration.source_path_specs[0].TYPE_INDICATOR == 'APFS':
        module = self._modules.get('macos_connector', None)
        # NOTE(review): assumes macos_connector is always registered for
        # APFS images — .get() returning None would raise AttributeError
        # here; confirm registration is guaranteed upstream.
        module.Connect(
            par_id='',
            configuration=configuration,
            source_path_spec=configuration.source_path_specs[0],
            knowledge_base=self.knowledge_base)
    else:
        # ReFS support ('refs_connector') was prototyped here and is
        # currently disabled.
        for source_path_spec in configuration.source_path_specs:
            if source_path_spec.IsFileSystem() \
                    or source_path_spec.type_indicator == dfvfs_definitions.TYPE_INDICATOR_OS:
                try:
                    par_id = self.get_partition_id(source_path_spec,
                                                   configuration)
                    for module_name in self._modules:
                        module = self._modules.get(module_name, None)
                        if isinstance(module,
                                      modules_interface.ModuleConnector):
                            # Android Forensic Module: connect only once.
                            if module_name == 'andforensics_connector':
                                if not and_flag:
                                    module.Connect(
                                        par_id=par_id,
                                        configuration=configuration,
                                        source_path_spec=source_path_spec,
                                        knowledge_base=self.knowledge_base)
                                    and_flag = True
                            # Skip these modules here; they are handled
                            # elsewhere (or not applicable to this path).
                            # ('andforensics_connector' was removed from
                            # this list: it is unreachable — the branch
                            # above already matches it.)
                            elif module_name in [
                                    'fica_connector',
                                    'extract_connector',
                                    'image_classification_connector',
                                    'kakaotalk_mobile_decrypt_connector',
                                    'android_basic_apps_connector',
                                    'android_user_apps_connector',
                                    'macos_connector'
                            ]:
                                pass
                            # Other modules: connect with progress output.
                            else:
                                module.print_run_info(module.DESCRIPTION,
                                                      start=True)
                                module.Connect(
                                    par_id=par_id,
                                    configuration=configuration,
                                    source_path_spec=source_path_spec,
                                    knowledge_base=self.knowledge_base)
                                module.print_run_info(module.DESCRIPTION,
                                                      start=False)
                except RuntimeError as exception:
                    # Chain the original exception for the root cause.
                    raise errors.BackEndError(
                        'The module cannot be connected: {0!s}'.format(
                            exception)) from exception
def Process(self, configuration):
    """Connect modules to each source, with GUI-oriented progress output.

    For APFS sources, only ``macos_connector`` does real work; every
    other module just emits its start/stop progress lines so the GUI
    still shows a complete run. ``andforensics_connector`` and
    ``macos_connector`` each connect at most once across all sources;
    on subsequent sources they only emit progress output.

    Args:
        configuration: processing configuration; must expose
            ``source_path_specs`` (iterable of dfVFS path specs).

    Raises:
        errors.BackEndError: if a module raises RuntimeError.
    """
    and_flag = False    # andforensics_connector must run at most once.
    macos_flag = False  # macos_connector must run at most once.
    for source_path_spec in configuration.source_path_specs:
        if source_path_spec.IsFileSystem() \
                or source_path_spec.type_indicator == dfvfs_definitions.TYPE_INDICATOR_OS:
            try:
                par_id = self.get_partition_id(source_path_spec,
                                               configuration)
                for module_name in self._modules:
                    module = self._modules.get(module_name, None)
                    if source_path_spec.TYPE_INDICATOR == 'APFS':
                        # For APFS, non-macOS modules only report progress
                        # (keeps the GUI progress display consistent).
                        if module.NAME != 'macos_connector':
                            module.print_run_info(module.DESCRIPTION,
                                                  start=True)
                            module.print_run_info(module.DESCRIPTION,
                                                  start=False)
                            continue
                    if isinstance(module, modules_interface.ModuleConnector):
                        # Android Forensic Module: connect only once.
                        if module_name == 'andforensics_connector':
                            if not and_flag:
                                module.print_run_info(module.DESCRIPTION,
                                                      start=True)
                                module.Connect(
                                    par_id=par_id,
                                    configuration=configuration,
                                    source_path_spec=source_path_spec,
                                    knowledge_base=self.knowledge_base)
                                module.print_run_info(module.DESCRIPTION,
                                                      start=False)
                                and_flag = True
                            else:
                                # Already ran: progress output only (GUI).
                                module.print_run_info(module.DESCRIPTION,
                                                      start=True)
                                module.print_run_info(module.DESCRIPTION,
                                                      start=False)
                        # Skip these modules here.
                        # ('andforensics_connector' was removed from this
                        # list: it is unreachable — the branch above
                        # already matches it.)
                        elif module_name in [
                                'fica_connector',
                                'extract_connector',
                                'image_classification_connector'
                        ]:
                            pass
                        elif module_name == 'macos_connector':
                            if not macos_flag:
                                module.print_run_info(module.DESCRIPTION,
                                                      start=True)
                                module.Connect(
                                    par_id='',
                                    configuration=configuration,
                                    source_path_spec=source_path_spec,
                                    knowledge_base=self.knowledge_base)
                                module.print_run_info(module.DESCRIPTION,
                                                      start=False)
                                macos_flag = True
                            else:
                                # Already ran: progress output only (GUI).
                                module.print_run_info(module.DESCRIPTION,
                                                      start=True)
                                module.print_run_info(module.DESCRIPTION,
                                                      start=False)
                        # Other modules: connect with progress output.
                        else:
                            module.print_run_info(module.DESCRIPTION,
                                                  start=True)
                            module.Connect(
                                par_id=par_id,
                                configuration=configuration,
                                source_path_spec=source_path_spec,
                                knowledge_base=self.knowledge_base)
                            module.print_run_info(module.DESCRIPTION,
                                                  start=False)
            except RuntimeError as exception:
                # Chain the original exception so the root cause survives.
                raise errors.BackEndError(
                    'The module cannot be connected: {0!s}'.format(
                        exception)) from exception
def _InsertFileInfo(self, file_entry, parent_id=0):
    """Build CarpeFile records for a file entry and insert them.

    Collects metadata (inode, MACB times, mode, owner, size), optional
    signature identification and SHA-1/RDS lookup per data stream, plus
    alternate-data-stream records and a synthetic '-slack' record per
    file whose size is not block-aligned, then hands everything to
    ``self._InsertFileInfoRecords``.

    Args:
        file_entry: dfVFS/TSK file entry to record.
        parent_id: database id of the parent directory record.

    Returns:
        False when a needed file object cannot be opened; otherwise None.

    Raises:
        errors.BackEndError: if signature scanning fails with IOError.
        errors.HashCalculateError: if SHA-1 computation fails.
    """
    if file_entry.name in ['', '.', '..']:
        return
    files = []
    tsk_file = file_entry.GetTSKFile()
    # 'carpe_file' (was 'file', shadowing the builtin) is the template
    # record for this entry.
    carpe_file = CarpeFile.CarpeFile()
    if self.standalone_check:
        del carpe_file._id
    carpe_file._name = file_entry.name
    if len(self._partition_list) > 1:
        parent_location = getattr(file_entry.path_spec.parent, 'location',
                                  None)
        # NOTE(review): parent_location may be None here (a guard was
        # previously commented out) — confirm callers guarantee it.
        carpe_file._p_id = self._partition_list[parent_location[1:]]
    else:
        carpe_file._p_id = self._partition_list['p1']
    location = getattr(file_entry.path_spec, 'location', None)
    if location is None:
        return
    else:
        carpe_file._parent_path = 'root' + os.path.dirname(location)
    carpe_file._parent_id = parent_id
    # Map the entry type to the project's directory-type code
    # (5=file, 3=directory, 10/11=special FS metadata, 0=unknown).
    if file_entry.entry_type == 'file':
        carpe_file._dir_type = 5
    elif file_entry.entry_type == 'directory':
        carpe_file._dir_type = 3
    else:
        if file_entry.name == '$MBR':
            carpe_file._dir_type = 10
        elif file_entry.name == '$FAT1':
            carpe_file._dir_type = 10
        elif file_entry.name == '$FAT2':
            carpe_file._dir_type = 10
        elif file_entry.name == '$OrphanFiles':
            carpe_file._dir_type = 11
        else:
            carpe_file._dir_type = 0
    carpe_file._meta_type = [lambda: 0,
                             lambda: int(tsk_file.info.meta.type)
                             ][tsk_file.info.meta is not None]()
    carpe_file._file_id = tsk_file.info.meta.addr
    for attribute in tsk_file:
        # NTFS attributes we record an inode string for.
        if attribute.info.type in definitions.ATTRIBUTE_TYPES_TO_ANALYZE:
            # (Removed leftover debug code: a hard-coded inode list
            # triggering print("asdf").)
            # NTFS inodes are addr-type-id triples; other FS use addr.
            carpe_file._inode = [
                lambda: "{0:d}".format(tsk_file.info.meta.addr),
                lambda: "{0:d}-{1:d}-{2:d}".format(
                    tsk_file.info.meta.addr,
                    int(attribute.info.type),
                    attribute.info.id)] \
                [tsk_file.info.fs_info.ftype in [
                    definitions.TSK_FS_TYPE_NTFS,
                    definitions.TSK_FS_TYPE_NTFS_DETECT]]()
        # $Standard_Information timestamps.
        if attribute.info.type in definitions.ATTRIBUTE_TYPES_TO_ANALYZE_TIME:
            carpe_file._mtime = [
                lambda: 0, lambda: tsk_file.info.meta.mtime
            ][tsk_file.info.meta.mtime is not None]()
            carpe_file._atime = [
                lambda: 0, lambda: tsk_file.info.meta.atime
            ][tsk_file.info.meta.atime is not None]()
            carpe_file._ctime = [
                lambda: 0, lambda: tsk_file.info.meta.ctime
            ][tsk_file.info.meta.ctime is not None]()
            carpe_file._etime = [
                lambda: 0, lambda: tsk_file.info.meta.crtime
            ][tsk_file.info.meta.crtime is not None]()
            carpe_file._mtime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.mtime_nano
            ][tsk_file.info.meta.mtime_nano is not None]()
            carpe_file._atime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.atime_nano
            ][tsk_file.info.meta.atime_nano is not None]()
            carpe_file._ctime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.ctime_nano
            ][tsk_file.info.meta.ctime_nano is not None]()
            carpe_file._etime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.crtime_nano
            ][tsk_file.info.meta.crtime_nano is not None]()
        # $FileName timestamps plus size/mode/owner fields.
        if attribute.info.type in definitions.ATTRIBUTE_TYPES_TO_ANALYZE_ADDITIONAL_TIME:
            carpe_file._additional_mtime = [
                lambda: 0, lambda: tsk_file.info.meta.mtime
            ][tsk_file.info.meta.mtime is not None]()
            carpe_file._additional_atime = [
                lambda: 0, lambda: tsk_file.info.meta.atime
            ][tsk_file.info.meta.atime is not None]()
            carpe_file._additional_ctime = [
                lambda: 0, lambda: tsk_file.info.meta.ctime
            ][tsk_file.info.meta.ctime is not None]()
            carpe_file._additional_etime = [
                lambda: 0, lambda: tsk_file.info.meta.crtime
            ][tsk_file.info.meta.crtime is not None]()
            carpe_file._additional_mtime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.mtime_nano
            ][tsk_file.info.meta.mtime_nano is not None]()
            carpe_file._additional_atime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.atime_nano
            ][tsk_file.info.meta.atime_nano is not None]()
            carpe_file._additional_ctime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.ctime_nano
            ][tsk_file.info.meta.ctime_nano is not None]()
            carpe_file._additional_etime_nano = [
                lambda: 0, lambda: tsk_file.info.meta.crtime_nano
            ][tsk_file.info.meta.crtime_nano is not None]()
            if file_entry.IsFile():
                _, carpe_file._extension = os.path.splitext(file_entry.name)
                if carpe_file._extension:
                    # Drop the leading dot.
                    carpe_file._extension = carpe_file._extension[1:]
            carpe_file._size = int(tsk_file.info.meta.size)
            carpe_file._mode = int(attribute.info.fs_file.meta.mode)
            carpe_file._meta_seq = int(attribute.info.fs_file.meta.seq)
            carpe_file._uid = int(attribute.info.fs_file.meta.uid)
            carpe_file._gid = int(attribute.info.fs_file.meta.gid)
            carpe_file._ads = len(file_entry.data_streams)
        else:
            logger.info('TODO: Deal with other attribute types')
    if file_entry.IsFile():
        for data_stream in file_entry.data_streams:
            signature_result = ''
            hash_result = ''
            rds_result = ''
            # Signature identification (libsigscan first, siga fallback).
            if self.signature_check and carpe_file._size > 0 and file_entry.IsFile():
                file_object = file_entry.GetFileObject(
                    data_stream_name=data_stream.name)
                if not file_object:
                    return False
                try:
                    results = self._signature_tool.ScanFileObject(
                        file_object)
                    if results:
                        sig = results[0].identifier.split(':')
                        signature_result = sig[0]
                    else:
                        file_object.seek(0, os.SEEK_SET)
                        file_content = file_object.read()
                        self._signature_tool.siga.Identify(file_content)
                        if self._signature_tool.siga.ext:
                            # Drop the leading dot of the extension.
                            signature_result = self._signature_tool.siga.ext[1:]
                except IOError as exception:
                    # Fixed format spec: '{0:s}' on an exception object
                    # raises TypeError; use !s conversion.
                    raise errors.BackEndError(
                        'Unable to scan file: error: {0!s}'.format(
                            exception)) from exception
                finally:
                    file_object.close()
            # SHA-1 computation and NSRL/RDS membership check.
            if self.rds_check and carpe_file._size > 0 and file_entry.IsFile():
                file_object = file_entry.GetFileObject(
                    data_stream_name=data_stream.name)
                if not file_object:
                    return False
                try:
                    hash_result = hashlib.sha1(
                        file_object.read(
                            carpe_file._size)).hexdigest().upper()
                except Exception as exception:
                    # Fixed format spec ({1:s} -> {1!s}); removed the
                    # unreachable logger.error/continue that followed
                    # this raise.
                    raise errors.HashCalculateError(
                        'Failed to compute SHA1 hash for file({0:s}): error: {1!s} '
                        .format(file_entry.name,
                                exception)) from exception
                finally:
                    file_object.close()
                # Fixed: compare the freshly computed hash, not the
                # not-yet-assigned record attribute.
                if hash_result in self._rds_set:
                    rds_result = "Matching"
                else:
                    rds_result = "Not Matching"
            if data_stream.name:
                # Alternate data stream: clone the template record.
                file_ads = CarpeFile.CarpeFile()
                file_ads.__dict__ = carpe_file.__dict__.copy()
                file_ads._name = carpe_file._name + ":" + data_stream.name
                file_ads._extension = ''
                file_ads._size = data_stream._tsk_attribute.info.size
                file_ads._sig_type = signature_result
                file_ads._sha1 = hash_result
                file_ads._rds_existed = rds_result
                files.append(file_ads)
            else:
                carpe_file._sig_type = signature_result
                carpe_file._sha1 = hash_result
                carpe_file._rds_existed = rds_result
                files.append(carpe_file)
    else:
        files.append(carpe_file)
    # Synthesize a slack record for each file whose size is not a
    # multiple of the block size.
    _temp_files = copy.deepcopy(files)
    for _temp_file in _temp_files:
        slack_size = 0
        # NOTE(review): the first operand uses the main record's size,
        # not _temp_file._size — looks intentional for skipping empty
        # files, but confirm for ADS records whose size differs.
        if carpe_file._size > 0 and _temp_file._size % tsk_file.info.fs_info.block_size > 0:
            slack_size = tsk_file.info.fs_info.block_size - (
                _temp_file._size % tsk_file.info.fs_info.block_size)
        if slack_size > 0:
            file_slack = CarpeFile.CarpeFile()
            if self.standalone_check:
                del file_slack._id
            file_slack._size = slack_size
            file_slack._file_id = _temp_file._file_id
            file_slack._p_id = _temp_file._p_id
            file_slack._parent_path = _temp_file._parent_path
            file_slack._type = 7
            file_slack._name = _temp_file._name + '-slack'
            files.append(file_slack)
    self._InsertFileInfoRecords(files)