def process_mft(self, par_id, configuration, table_list, knowledge_base):
    """Parse the $MFT, bulk-insert every record into ``table_list[0]``,
    and return the parser.

    For each file record the full paths are resolved and cached in
    ``self.path_dict``; records whose paths cannot be built are skipped.
    The underlying file object is intentionally left open because the
    returned ``MasterFileTableParser`` continues to read from it.
    """
    mft_object = open(self._mft_path, 'rb')
    mft_file = MFT.MasterFileTableParser(mft_object)
    info = [par_id, configuration.case_id, configuration.evidence_id]
    mft_list = []
    for file_record in mft_file.file_records():
        try:
            file_paths = mft_file.build_full_paths(file_record, True)
            self.path_dict[file_record] = [e[0] for e in file_paths]
        except MFT.MasterFileTableException:
            # Unresolvable record — skip it entirely.
            continue
        # TODO: fix duplicated file_path entries
        # Keep only the first resolved path when any exist; an empty
        # list is passed through unchanged.
        selected_paths = [file_paths[0]] if file_paths else file_paths
        parsed_rows = mft_parser.mft_parse(info, mft_file, file_record, selected_paths, knowledge_base.time_zone)
        mft_list.extend(parsed_rows)
    query = f"Insert into {table_list[0]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, " \
            f"%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
    configuration.cursor.bulk_execute(query, mft_list)
    # print(f'mft num: {len(mft_list)}')
    return mft_file
def process_mft(self, par_id, configuration, table_list):
    """Parse the $MFT and bulk-insert every record into ``table_list[0]``.

    NOTE(review): this definition has the same name as the earlier
    4-argument ``process_mft`` in this file; within one class the later
    definition shadows the earlier one — confirm which is intended.

    Fixes vs. original: the $MFT file handle is now closed via a
    ``with`` block (it was leaked), and the unused ``enumerate`` index
    has been removed.
    """
    with open(self._mft_path, 'rb') as mft_object:
        mft_file = MFT.MasterFileTableParser(mft_object)
        info = tuple([par_id, configuration.case_id, configuration.evidence_id])
        query = f"Insert into {table_list[0]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, " \
                f"%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        mft_list = []
        for file_record in tqdm(mft_file.file_records()):
            try:
                file_paths = mft_file.build_full_paths(file_record, True)
            except MFT.MasterFileTableException:
                # Unresolvable record — skip it entirely.
                continue
            # TODO: fix duplicated file_path entries
            if not file_paths:
                mft_list.extend(mft_parser.mft_parse(info, mft_file, file_record, file_paths))
            else:
                # Keep only the first resolved path.
                mft_list.extend(mft_parser.mft_parse(info, mft_file, file_record, [file_paths[0]]))
        print(f'mft num: {len(mft_list)}')
        configuration.cursor.bulk_execute(query, mft_list)
def process_logfile(self, par_id, configuration, table_list):
    """Parse $LogFile NTFS records and bulk-insert restart areas into
    ``table_list[1]`` and log records into ``table_list[2]``.

    Fixes vs. original: both file handles are now closed via ``with``
    (they were leaked); the guard ``if not type(log_item)`` was dead
    code (a type object is always truthy) and is replaced by an
    explicit ``None`` skip, which is behavior-equivalent because a
    ``None`` item matched neither type check; the unused ``enumerate``
    index has been removed.
    """
    with open(self._logfile_path, 'rb') as logfile_object, \
            open(self._mft_path, 'rb') as mft_object:
        log_file = LogFile.LogFileParser(logfile_object)
        mft_file = MFT.MasterFileTableParser(mft_object)
        restart_area_list = []
        log_record_list = []
        info = tuple([par_id, configuration.case_id, configuration.evidence_id])
        for log_item in tqdm(log_file.parse_ntfs_records()):
            if log_item is None:
                continue
            elif type(log_item) is LogFile.NTFSRestartArea:
                output_data = logfile_parser.restart_area_parse(log_item)
                restart_area_list.append(info + tuple(output_data))
            elif type(log_item) is LogFile.NTFSLogRecord:
                output_data = logfile_parser.log_record_parse(log_item, mft_file)
                log_record_list.append(info + tuple(output_data))
        restart_area_query = f"Insert into {table_list[1]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        log_record_query = f"Insert into {table_list[2]} values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        print(f'restart area num: {len(restart_area_list)}')
        print(f'log record num: {len(log_record_list)}')
        configuration.cursor.bulk_execute(restart_area_query, restart_area_list)
        configuration.cursor.bulk_execute(log_record_query, log_record_list)
def process_usnjrnl(self, par_id, configuration, table_list):
    """Parse the $UsnJrnl change journal and bulk-insert every USN
    record into ``table_list[3]``.

    Fixes vs. original: both file handles are now closed via ``with``
    (they were leaked); the loop-invariant identifier tuple is built
    once instead of per record; the unused ``enumerate`` index has
    been removed.
    """
    with open(self._usnjrnl_path, 'rb') as usn_object, \
            open(self._mft_path, 'rb') as mft_object:
        usn_journal = USN.ChangeJournalParser(usn_object)
        mft_file = MFT.MasterFileTableParser(mft_object)
        query = f"Insert into {table_list[3]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        info = (par_id, configuration.case_id, configuration.evidence_id)
        usnjrnl_list = []
        for usn_record in tqdm(usn_journal.usn_records()):
            usn_tuple = usnjrnl_parser.usnjrnl_parse(mft_file, usn_record)
            usnjrnl_list.append(info + usn_tuple)
        print(f'usnjrnl num: {len(usnjrnl_list)}')
        configuration.cursor.bulk_execute(query, usnjrnl_list)