def LoadTargetFileToMemory(self, source_path_spec, configuration,
                           file_path=None, file_spec=None,
                           data_stream_name=None):
    """Open a target file inside the source image and return its file object.

    Args:
        source_path_spec: path specification of the source (e.g. a partition).
        configuration: analyzer configuration holding the resolver context.
        file_path: location of the target file; used when file_spec is None.
        file_spec: pre-built find specification; overrides file_path.
        data_stream_name: optional name of an alternate data stream to open.

    Returns:
        A file-like object for the first matching file entry, False on
        failure, or None when no path specification matched at all.
    """
    try:
        if not file_spec:
            find_spec = file_system_searcher.FindSpec(
                case_sensitive=False, location=file_path,
                location_separator=source_path_spec.location)
        else:
            find_spec = file_spec
    except ValueError as exception:
        logger.error(
            'Unable to build find specification for path: "{0:s}" with '
            'error: {1!s}'.format(file_path, exception))
        # Bail out: without a valid find_spec the extraction below would
        # raise NameError (find_spec would be unbound).
        return False

    path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
        [source_path_spec], find_specs=[find_spec],
        recurse_file_system=False,
        resolver_context=configuration.resolver_context)

    for path_spec in path_spec_generator:
        display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
            path_spec)
        try:
            file_entry = path_spec_resolver.Resolver.OpenFileEntry(
                path_spec, resolver_context=configuration.resolver_context)

            if file_entry is None or not file_entry.IsFile():
                logger.warning(
                    'Unable to open file entry with path spec: {0:s}'.format(
                        display_name))
                return False

            # Open either the named alternate data stream or the default one.
            if data_stream_name:
                file_object = file_entry.GetFileObject(
                    data_stream_name=data_stream_name)
            else:
                file_object = file_entry.GetFileObject()

            if not file_object:
                return False
            return file_object
        except KeyboardInterrupt:
            return False
def check_table_from_yaml(self, configuration, yaml_list, table_list):
    """Ensure every table in table_list exists, creating it from its schema.

    Args:
        configuration: analyzer configuration holding the database cursor.
        yaml_list: YAML schema file paths, parallel to table_list.
        table_list: database table names, parallel to yaml_list.

    Returns:
        True when every table exists (or was created), False on any failure.
    """
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        # Create the table only when it does not exist yet.
        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor,
                                   configuration.standalone_check)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False
    return True
def ExtractTargetDirToPath(self, source_path_spec, configuration,
                           dir_path=None, file_spec=None, output_path=None):
    """Extract a target directory from the source image into output_path.

    Args:
        source_path_spec: path specification of the source (e.g. a partition).
        configuration: analyzer configuration holding the resolver context.
        dir_path: location of the target directory; used when file_spec is
            None.
        file_spec: pre-built find specification; overrides dir_path.
        output_path: local directory that receives the extracted entries.

    Returns:
        False when the find specification cannot be built; None otherwise.
    """
    try:
        if not file_spec:
            find_spec = file_system_searcher.FindSpec(
                case_sensitive=False, location=dir_path,
                location_separator=source_path_spec.location)
        else:
            find_spec = file_spec
    except ValueError as exception:
        logger.error(
            'Unable to build find specification for path: "{0:s}" with '
            'error: {1!s}'.format(dir_path, exception))
        # Bail out: continuing with find_spec unbound would raise NameError.
        return False

    path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
        [source_path_spec], find_specs=[find_spec],
        recurse_file_system=False,
        resolver_context=configuration.resolver_context)

    for path_spec in path_spec_generator:
        self.DirectoryTraversal(path_spec, output_path)
def Analyze(self, par_id, configuration, source_path_spec, knowledge_base):
    """Create the lv2_timeline table from its YAML schema.

    The actual timeline analysis is not implemented yet (see the trailing
    print); this method currently only prepares the table.

    Returns:
        False on schema-load or table-creation failure; None otherwise.
    """
    print('[MODULE]: LV2 Timeline Analyzer')

    this_file_path = (os.path.dirname(os.path.abspath(__file__))
                      + os.sep + 'schema' + os.sep)
    # YAML schema files and their corresponding table names (parallel lists).
    yaml_list = [this_file_path + 'lv2_timeline.yaml']
    table_list = ['lv2_timeline']

    # Create every table that does not exist yet.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False

    print("not yet")
def RecursiveDirOrFileSearch(self, path_spec, output_path):
    """Recursively extract the entry at path_spec into output_path.

    Directories are recreated under output_path and recursed into; files
    have each of their data streams copied out in 64 KiB chunks.

    Args:
        path_spec: dfVFS path specification of the entry to extract.
        output_path: local directory that receives the extracted entries.

    Returns:
        False on failure; None otherwise.
    """
    display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
        path_spec)

    file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
    if file_entry is None:
        logger.warning(
            'Unable to open file entry with path spec: {0:s}'.format(
                display_name))
        return False

    if file_entry.IsDirectory():
        target_dir = output_path + os.sep + file_entry.name
        if not os.path.exists(target_dir):
            os.mkdir(target_dir)

        for sub_file_entry in file_entry.sub_file_entries:
            try:
                if not sub_file_entry.IsAllocated():
                    continue
            except dfvfs_errors.BackEndError as exception:
                logger.warning(
                    'Unable to process file: {0:s} with error: {1!s}'.format(
                        sub_file_entry.path_spec.comparable.replace(
                            '\n', ';'), exception))
                continue

            # Skip the TSK virtual $OrphanFiles directory at the volume root.
            if sub_file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_TSK:
                if file_entry.IsRoot() and sub_file_entry.name == '$OrphanFiles':
                    continue

            self.RecursiveDirOrFileSearch(sub_file_entry.path_spec,
                                          target_dir)

    if file_entry.IsFile():
        for data_stream in file_entry.data_streams:
            file_object = file_entry.GetFileObject(
                data_stream_name=data_stream.name)
            if not file_object:
                return False

            try:
                buffer_size = 65536
                # NOTE(review): every data stream is written to the same
                # output path, so alternate streams overwrite the default
                # one — confirm whether the stream name should be appended
                # (as ExtractTargetFileToPath does).
                with open(output_path + os.sep + file_entry.name,
                          'wb') as output_file:
                    file_object.seek(0, os.SEEK_SET)
                    data = file_object.read(buffer_size)
                    while data:
                        output_file.write(data)
                        data = file_object.read(buffer_size)
            except IOError as exception:
                print(display_name)
                logger.error(
                    'Failed to extract file "{0:s}" : {1!s}'.format(
                        display_name, exception))
            finally:
                file_object.close()
def ExtractTargetFileToPath(self, source_path_spec, configuration,
                            file_path=None, file_spec=None, output_path=None,
                            data_stream_name=None):
    """Extract a single target file from the source image into output_path.

    Args:
        source_path_spec: path specification of the source (e.g. a partition).
        configuration: analyzer configuration holding the resolver context.
        file_path: location of the target file; used when file_spec is None.
        file_spec: pre-built find specification; overrides file_path.
        output_path: local directory the file is extracted into.
        data_stream_name: optional name of an alternate data stream; when
            set, the output file name gets '_<stream name>' appended.

    Returns:
        False on failure; None after a successful extraction (or when no
        path specification matched).
    """
    # TODO: handle the case where a find_spec is supplied (see the Load*
    # counterpart).
    try:
        if not file_spec:
            find_spec = file_system_searcher.FindSpec(
                case_sensitive=False, location=file_path,
                location_separator=source_path_spec.location)
        else:
            find_spec = file_spec
    except ValueError as exception:
        logger.error(
            'Unable to build find specification for path: "{0:s}" with '
            'error: {1!s}'.format(file_path, exception))
        # Bail out: continuing with find_spec unbound would raise NameError.
        return False

    path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
        [source_path_spec], find_specs=[find_spec],
        recurse_file_system=False,
        resolver_context=configuration.resolver_context)

    for path_spec in path_spec_generator:
        display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
            path_spec)
        try:
            file_entry = path_spec_resolver.Resolver.OpenFileEntry(
                path_spec, resolver_context=configuration.resolver_context)

            if file_entry is None or not file_entry.IsFile():
                logger.warning(
                    'Unable to open file entry with path spec: {0:s}'.format(
                        display_name))
                return False

            if data_stream_name:
                file_object = file_entry.GetFileObject(
                    data_stream_name=data_stream_name)
                if not file_object:
                    return False

                try:
                    buffer_size = 65536
                    with open(output_path + os.sep + file_entry.name + '_'
                              + data_stream_name, 'wb') as output_file:
                        file_object.seek(0, os.SEEK_SET)
                        data = file_object.read(buffer_size)
                        while data:
                            output_file.write(data)
                            data = file_object.read(buffer_size)
                except IOError as exception:
                    # TODO: replace data_stream_name by display name.
                    logger.error(
                        'Failed to extract file "{0:s}" : {1!s}'.format(
                            data_stream_name, exception))
                    return False
                finally:
                    file_object.close()
            else:
                file_object = file_entry.GetFileObject()
                if not file_object:
                    return False

                try:
                    buffer_size = 65536
                    with open(output_path + os.sep + file_entry.name,
                              'wb') as output_file:
                        file_object.seek(0, os.SEEK_SET)
                        data = file_object.read(buffer_size)
                        while data:
                            output_file.write(data)
                            data = file_object.read(buffer_size)
                except IOError as exception:
                    logger.error(
                        'Failed to extract file "{0:s}" : {1!s}'.format(
                            display_name, exception))
                finally:
                    file_object.close()
        except KeyboardInterrupt:
            return False
def Analyze(self, configuration, source_path_spec):
    """Build the LV2 usage-history visualization tables from event logs.

    Creates the four visualization tables from their YAML schemas, then
    fills usage_day_detail, usage_year, usage_day_stat and timeline_month
    from the LV1 analyzers (udd, uy, uds, tm).

    Returns:
        False when the partition cannot be resolved, no evtx logs exist,
        or table creation fails; None otherwise.
    """
    print('[MODULE]: LV2 OS Usage History Analyzer')

    location = getattr(source_path_spec.parent, 'location', None)
    if location is None:
        # No partition location — nothing to analyze.
        return False
    par_id = configuration.partition_list[location[1:]]
    if par_id is None:
        return False

    query = f"SELECT name, parent_path, extension FROM file_info WHERE (par_id='{par_id}') and extension = 'evtx' and parent_path = 'root/Windows/System32/winevt/Logs'"
    eventlog_files = configuration.cursor.execute_query_mul(query)
    if len(eventlog_files) == 0:
        return False

    print('[MODULE]: LV2 OS Win Usage History Analyzer Connect')

    this_file_path = (os.path.dirname(os.path.abspath(__file__)) + os.sep
                      + 'schema' + os.sep + 'visualization' + os.sep)
    # YAML schema files and their corresponding table names (parallel lists).
    yaml_list = [
        this_file_path + 'lv2_visualization_usage_day_detail.yaml',
        this_file_path + 'lv2_visualization_usage_year.yaml',
        this_file_path + 'lv2_visualization_usage_day_stat.yaml',
        this_file_path + 'lv2_visualization_timeline_month.yaml'
    ]
    table_list = [
        'usage_day_detail', 'usage_year', 'usage_day_stat', 'timeline_month'
    ]

    # Create every table that does not exist yet.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False

    # USAGE_DAY_DETAIL
    print('[MODULE]: LV2 OS Win Usage History Analyzer - USAGE_DAY_DETAIL')
    insert_data = []
    for result in udd.USAGEDAYDETAIL(configuration):
        try:
            # Shift to local time (UTC+9); fall back to the raw value when
            # regdate is not in the expected microsecond format.
            regdate = datetime.strptime(
                result.regdate, '%Y-%m-%d %H:%M:%S.%f') + timedelta(hours=9)
        except (ValueError, TypeError):
            regdate = result.regdate
        insert_data.append((
            regdate, result.evdnc_type, result.artifact_type,
            result.information, configuration.case_id,
            configuration.evidence_id))

    query = "Insert into usage_day_detail values (%s, %s, %s, %s, %s, %s);"
    if len(insert_data) > 0:
        configuration.cursor.bulk_execute(query, insert_data)

    # USAGE_YEAR
    print('[MODULE]: LV2 OS Win Usage History Analyzer - USAGE_YEAR')
    insert_data = []
    for result in uy.USAGEYEAR(configuration):
        insert_data.append((
            result.year, result.month, result.cnt, configuration.case_id,
            configuration.evidence_id))

    query = "Insert into usage_year values (%s, %s, %s, %s, %s);"
    if len(insert_data) > 0:
        configuration.cursor.bulk_execute(query, insert_data)

    # USAGE_DAY_STAT
    print('[MODULE]: LV2 OS Win Usage History Analyzer - USAGE_DAY_STAT')
    insert_data = []
    for result in uds.USAGEDAYSTAT(configuration):
        insert_data.append((
            result.year, result.month, result.day, result.hour, result.min,
            result.act, configuration.case_id, configuration.evidence_id))

    query = "Insert into usage_day_stat values (%s, %s, %s, %s, %s, %s, %s, %s);"
    if len(insert_data) > 0:
        configuration.cursor.bulk_execute(query, insert_data)

    # TIMELINE_MONTH
    print('[MODULE]: LV2 OS Win Usage History Analyzer - TIMELINE MONTH')
    insert_data = []
    query = "Insert into timeline_month values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
    for result in tm.TIMELINEMONTH(configuration):
        for row in result:
            insert_data.append(tuple(row))
    configuration.cursor.bulk_execute(query, insert_data)
def Analyze(self, configuration, source_path_spec):
    """Populate lv2_os_mft_history with file timestamps and a copy flag.

    Reads file_info rows for this partition, normalizes the four MFT
    timestamps, and flags files whose modification time precedes their
    creation time as copied.

    Returns:
        False when the partition cannot be resolved or table creation
        fails; None otherwise.
    """
    print('[MODULE]: LV2 OS APP History Analyzer')

    location = getattr(source_path_spec.parent, 'location', None)
    if location is None:
        # No partition location — nothing to analyze.
        return False
    par_id = configuration.partition_list[location[1:]]
    if par_id is None:
        return False

    this_file_path = (os.path.dirname(os.path.abspath(__file__)) + os.sep
                      + 'schema' + os.sep)
    # YAML schema files and their corresponding table names (parallel lists).
    yaml_list = [this_file_path + 'lv2_os_mft_history.yaml']
    table_list = ['lv2_os_mft_history']

    # Create every table that does not exist yet.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False

    query = f"SELECT file_id, par_id, inode, name, dir_type, size, extension, mtime, atime, ctime, etime, mtime_nano, atime_nano, ctime_nano, etime_nano, parent_path, parent_id FROM file_info WHERE par_id='{par_id}' and not type = \"7\";"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) == 0:
        return

    insert_data = []
    for result in results:
        mtime = result[7]        # file modified time (seconds)
        ctime = result[9]        # file created time (seconds)
        mtime_nano = result[11]  # sub-second part of mtime
        ctime_nano = result[13]  # sub-second part of ctime

        # Copied-file heuristic: copying resets the creation time, so a
        # modification time strictly earlier than the creation time
        # (comparing nanoseconds on a tie) marks the file as copied.
        if mtime < ctime or (mtime == ctime and mtime_nano < ctime_nano):
            is_copied = "Y"
        else:
            is_copied = "N"

        # Normalize timestamps to '<ISO time>.<nanoseconds>Z'.
        mtime = self._convert_timestamp(result[7]) + "." + str(result[11]) + "Z"
        atime = self._convert_timestamp(result[8]) + "." + str(result[12]) + "Z"
        ctime = self._convert_timestamp(result[9]) + "." + str(result[13]) + "Z"
        etime = self._convert_timestamp(result[10]) + "." + str(result[14]) + "Z"

        insert_data.append((
            result[1], configuration.case_id, configuration.evidence_id,
            result[0], result[2], result[3], result[4], result[5], result[6],
            mtime, atime, ctime, etime, result[15], result[16], is_copied))

    query = "Insert into lv2_os_mft_history values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
    configuration.cursor.bulk_execute(query, insert_data)
def Analyze(self, configuration, source_path_spec):
    """Aggregate app-execution artifacts into lv2_os_app_history.

    Collects rows from UserAssist, Amcache, Prefetch, Windows Timeline,
    application event logs and automatic jumplists, normalizing each into
    the 8-column lv2_os_app_history layout.

    Returns:
        False when the partition cannot be resolved or table creation
        fails; None otherwise.
    """
    print('[MODULE]: LV2 OS APP History Analyzer')

    location = getattr(source_path_spec.parent, 'location', None)
    if location is None:
        # No partition location — nothing to analyze.
        return False
    par_id = configuration.partition_list[location[1:]]
    if par_id is None:
        return False

    this_file_path = (os.path.dirname(os.path.abspath(__file__)) + os.sep
                      + 'schema' + os.sep)
    # YAML schema files and their corresponding table names (parallel lists).
    yaml_list = [this_file_path + 'lv2_os_app_history.yaml']
    table_list = ['lv2_os_app_history']

    # Create every table that does not exist yet.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False

    # All sections below share the same target-table layout.
    insert_query = "Insert into lv2_os_app_history values (%s, %s, %s, %s, %s, %s, %s, %s);"

    # UserAssist
    query = f"SELECT file_name, last_run_time FROM lv1_os_win_reg_user_assist WHERE par_id='{par_id}';"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            # Program name is the last path component of file_name.
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                result[0][result[0].rfind('/') + 1:], result[1], result[0],
                '', 'UserAssist'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # Amcache - file_entries
    query = f"SELECT file_name, key_last_updated_time, full_path FROM lv1_os_win_reg_amcache_file WHERE par_id='{par_id}';"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                result[0], result[1], result[2], '', 'Amcache-file_entries'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # Prefetch (reference_file to be added later).
    # NOTE(review): the selected names below are single-quoted, which SQL
    # treats as string literals rather than column identifiers, so every
    # row yields the literal text — confirm whether backquoted identifiers
    # were intended.
    query = f"SELECT 'program_name', 'program_path', 'program_run_count', 'file_created_time', 'last_run_time', '2nd_last_run_time', '3rd_last_run_time', '4th_last_run_time', '5th_last_run_time', '6th_last_run_time', '7th_last_run_time', '8th_last_run_time' FROM lv1_os_win_prefetch WHERE par_id='{par_id}';"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                result[0], result[3], result[1], '', 'Prefetch'))
            # Runs 2..8 (result[4]..result[11]) are only recorded when
            # present; a single space marks an absent run time.
            for run_time in result[4:12]:
                if run_time != ' ':
                    insert_data.append((
                        par_id, configuration.case_id,
                        configuration.evidence_id, result[0], run_time,
                        result[1], '', 'Prefetch'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # Windows Timeline
    query = f"SELECT program_name, start_time, content FROM lv1_os_win_windows_timeline WHERE par_id='{par_id}';"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                result[0][result[0].rfind('/') + 1:], result[1], result[0],
                result[2], 'Windows Timeline'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # Eventlog - application
    query = f"SELECT application_name, time, path FROM lv1_os_win_event_logs_applications WHERE par_id='{par_id}';"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                result[0], str(result[1]), result[2], '',
                'Eventlogs-Application'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # Jumplist - automatics
    query = f"SELECT file_name, file_path, record_time, application_name FROM lv1_os_win_jumplist_automatics WHERE par_id='{par_id}';"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                result[3], result[2], '', result[1], 'Jumplist-automatics'))
        configuration.cursor.bulk_execute(insert_query, insert_data)
def Analyze(self, par_id, configuration, source_path_spec, knowledge_base):
    """Build the usage-history visualization tables (NTFS partitions only).

    Creates the visualization tables from their YAML schemas, then fills
    usage_day_detail, usage_year, usage_day_stat and timeline_month_2 from
    the LV1 analyzers (udd, uy, uds, tm).

    Returns:
        False for non-NTFS partitions or on schema/table failure; None
        otherwise.
    """
    # NOTE(review): the separators are currently unused (the evtx pre-check
    # that consumed them is disabled); the getters are kept in case they
    # have side effects — confirm and drop if pure.
    query_separator = self.GetQuerySeparator(source_path_spec, configuration)
    path_separator = self.GetPathSeparator(source_path_spec)

    if source_path_spec.TYPE_INDICATOR != 'NTFS':
        return False

    this_file_path = (os.path.dirname(os.path.abspath(__file__)) + os.sep
                      + 'schema' + os.sep + 'visualization' + os.sep)
    # YAML schema files and their corresponding table names (parallel lists).
    yaml_list = [
        this_file_path + 'lv2_visualization_usage_day_detail.yaml',
        this_file_path + 'lv2_visualization_usage_year.yaml',
        this_file_path + 'lv2_visualization_usage_day_stat.yaml',
        this_file_path + 'lv2_visualization_timeline_month.yaml'
    ]
    table_list = [
        'usage_day_detail', 'usage_year', 'usage_day_stat',
        'timeline_month_2'
    ]

    # Create every table that does not exist yet.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False

    # USAGE_DAY_DETAIL
    print('[MODULE]: LV2 OS Win Usage History Analyzer - USAGE_DAY_DETAIL')
    insert_data = []
    for result in udd.USAGEDAYDETAIL(configuration,
                                     knowledge_base.time_zone):
        insert_data.append((
            result.regdate, result.evdnc_type, result.artifact_type,
            result.information, configuration.case_id,
            configuration.evidence_id))

    query = "Insert into usage_day_detail values (%s, %s, %s, %s, %s, %s);"
    if len(insert_data) > 0:
        configuration.cursor.bulk_execute(query, insert_data)

    # USAGE_YEAR
    print('[MODULE]: LV2 OS Win Usage History Analyzer - USAGE_YEAR')
    insert_data = []
    for result in uy.USAGEYEAR(configuration):
        insert_data.append((
            result.year, result.month, result.cnt, configuration.case_id,
            configuration.evidence_id))

    query = "Insert into usage_year values (%s, %s, %s, %s, %s);"
    if len(insert_data) > 0:
        configuration.cursor.bulk_execute(query, insert_data)

    # USAGE_DAY_STAT
    print('[MODULE]: LV2 OS Win Usage History Analyzer - USAGE_DAY_STAT')
    insert_data = []
    for result in uds.USAGEDAYSTAT(configuration):
        insert_data.append((
            result.year, result.month, result.day, result.hour, result.min,
            result.act, configuration.case_id, configuration.evidence_id))

    query = "Insert into usage_day_stat values (%s, %s, %s, %s, %s, %s, %s, %s);"
    if len(insert_data) > 0:
        configuration.cursor.bulk_execute(query, insert_data)

    # TIMELINE_MONTH
    print('[MODULE]: LV2 OS Win Usage History Analyzer - TIMELINE MONTH')
    insert_data = []
    query = "Insert into timeline_month_2 values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
    for result in tm.TIMELINEMONTH(configuration):
        for row in result:
            # First column carries the evidence id for this run.
            row[0] = configuration.evidence_id
            insert_data.append(tuple(row))
    configuration.cursor.bulk_execute(query, insert_data)
def Analyze(self, par_id, configuration, source_path_spec, knowledge_base):
    """Populate lv2_communication from KakaoTalk, SMS, MMS and call logs.

    Every record is normalized into the 11-column lv2_communication
    layout (ids, channel, direction, sender, partner, body, timestamp,
    room meta, members).

    Returns:
        False on schema-load or table-creation failure; None otherwise.
    """
    # Temporary: run only for this specific partition so the analyzer
    # executes exactly once.
    if par_id != 'p1947b3cd4f3c143b98b009eeeb4e966ef':
        return

    this_file_path = (os.path.dirname(os.path.abspath(__file__)) + os.sep
                      + 'schema' + os.sep)
    # YAML schema files and their corresponding table names (parallel lists).
    yaml_list = [this_file_path + 'lv2_communication.yaml']
    table_list = ['lv2_communication']

    # Create every table that does not exist yet.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error(
                'cannot load schema from yaml: {0:s}'.format(table_name))
            return False

        if not configuration.cursor.check_table_exist(table_name):
            ret = self.CreateTable(configuration.cursor)
            if not ret:
                logger.error(
                    'cannot create database table name: {0:s}'.format(
                        table_name))
                return False

    # All sections below share the same target-table layout.
    insert_query = "Insert into lv2_communication values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"

    # KAKAOTALK
    print('[MODULE]: LV2 Communication Analyzer - KAKAOTALK')
    # TODO: scope by par_id; the WHERE par_id clause was temporarily removed.
    query = f"SELECT friends.name, friends.phone_number, chatlogs.message, chatlogs.created_at, chatrooms.private_meta, chatrooms.members FROM carpe.lv1_app_kakaotalk_mobile_chatlogs as chatlogs, carpe.lv1_app_kakaotalk_mobile_friends as friends, carpe.lv1_app_kakaotalk_mobile_chatrooms as chatrooms where chatlogs.user_id = friends.id and chatlogs.chat_id = chatrooms.id;"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            # An empty phone number marks a message sent by the device
            # owner; otherwise the message is incoming.
            direction = 'Outgoing' if result[1] == '' else 'Incoming'
            # NOTE(review): the [2:5] slice truncates the message body —
            # presumably ad-hoc anonymization; confirm intent.
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                'KAKAOTALK', direction, result[0], result[1],
                result[2][2:5],
                datetime.fromtimestamp(int(result[3])).isoformat() + 'Z',
                result[4], result[5]))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # SMS
    print('[MODULE]: LV2 Communication Analyzer - SMS')
    # TODO: scope by par_id; the WHERE par_id clause was temporarily removed.
    query = f"SELECT sms.type, sms.address, sms.body, sms.date FROM carpe.lv1_os_and_basic_app_sms as sms;"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            direction = 'Outgoing' if result[0] == 'sent' else 'Incoming'
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                'SMS', direction, 'NULL', result[1][4:6], result[2][2:5],
                result[3], 'NULL', 'NULL'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # MMS
    print('[MODULE]: LV2 Communication Analyzer - MMS')
    # TODO: scope by par_id; the WHERE par_id clause was temporarily removed.
    query = f"SELECT mms.from, mms.to, mms.body, mms.date FROM carpe.lv1_os_and_basic_app_mms as mms;"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            # 'insert-address-token' in the from-field marks an outgoing
            # message; the partner is then the recipient, else the sender.
            if result[0] == 'insert-address-token':
                direction, partner = 'Outgoing', result[1][4:6]
            else:
                direction, partner = 'Incoming', result[0][4:6]
            body = result[2][2:5] if result[2] is not None else result[2]
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                'MMS', direction, 'NULL', partner, body, result[3],
                'NULL', 'NULL'))
        configuration.cursor.bulk_execute(insert_query, insert_data)

    # CALLS
    print('[MODULE]: LV2 Communication Analyzer - CALLS')
    # TODO: scope by par_id; the WHERE par_id clause was temporarily removed.
    query = f"SELECT call_l.type, call_l.partner, call_l.duration_in_secs, call_l.call_date FROM carpe.lv1_os_and_basic_app_call_logs as call_l"
    results = configuration.cursor.execute_query_mul(query)
    if len(results) > 0:
        insert_data = []
        for result in results:
            insert_data.append((
                par_id, configuration.case_id, configuration.evidence_id,
                'CALLS', result[0], 'NULL', result[1][4:6],
                'Duration : ' + result[2] + ' seconds', result[3],
                'NULL', 'NULL'))
        configuration.cursor.bulk_execute(insert_query, insert_data)
def Analyze(self, par_id, configuration, source_path_spec, knowledge_base):
    """Populate the lv2_timeline table from the lv1 artifact tables.

    Collects call logs, SMS/MMS, Android app usage, browser downloads and
    USB-device events for one partition and bulk-inserts them as unified
    timeline rows: (par_id, case_id, evidence_id, event_type, event_time,
    duration, description).

    Args:
        par_id: partition identifier; analysis is skipped when empty.
        configuration: module configuration holding the DB cursor,
            case_id, evidence_id and partition_list.
        source_path_spec (dfvfs.PathSpec): path spec of the source volume.
        knowledge_base: pre-processing knowledge base (unused here).

    Returns:
        False when par_id is missing or table setup fails, otherwise None.
    """
    if not par_id:
        return False

    # Re-derive par_id from the partition map; images without a TSK
    # partition table map to the single pseudo-partition 'p1'.
    if source_path_spec.parent.type_indicator != dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION:
        par_id = configuration.partition_list['p1']
    else:
        par_id = configuration.partition_list[getattr(source_path_spec.parent, 'location', None)[1:]]

    this_file_path = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'schema' + os.sep
    # Output schema(s) / table(s) to create.
    yaml_list = [this_file_path + 'lv2_timeline.yaml']
    table_list = ['lv2_timeline']

    # Create the output table when it does not exist yet.
    # NOTE(review): this mirrors check_table_from_yaml(), which passes
    # configuration.standalone_check to CreateTable while this call does
    # not — confirm which CreateTable signature is intended here.
    for yaml_path, table_name in zip(yaml_list, table_list):
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error('cannot load schema from yaml: {0:s}'.format(table_name))
            return False
        if not configuration.cursor.check_table_exist(table_name):
            if not self.CreateTable(configuration.cursor):
                logger.error('cannot create database table name: {0:s}'.format(table_name))
                return False

    insert_data = []
    for table_name, source_query in self._BuildSourceQueries(par_id):
        if not configuration.cursor.check_table_exist(table_name):
            continue  # source table absent for this image
        result = configuration.cursor.execute_query_mul(source_query)
        if len(result) == 0:
            continue  # table exists but holds no records
        insert_data.extend(
            self._BuildTimelineRows(table_name, result, par_id, configuration))

    query = "Insert into lv2_timeline values (%s, %s, %s, %s, %s, %s, %s);"
    configuration.cursor.bulk_execute(query, insert_data)

def _BuildSourceQueries(self, par_id):
    """Return ordered (table_name, SELECT query) pairs for each lv1 source.

    NOTE(review): queries interpolate par_id with f-strings; par_id is
    derived internally from the partition map, but parameterized queries
    would be safer if that ever changes.
    """
    call_log_query = (
        f"SELECT call_date, duration_in_secs, phone_account_address, partner, type "
        f"FROM lv1_os_and_basic_app_call_logs WHERE par_id='{par_id}';")
    mms_query = (
        f"SELECT * FROM lv1_os_and_basic_app_mms WHERE par_id='{par_id}';")
    sms_query = (
        f"SELECT date, address, body, type, service_center FROM lv1_os_and_basic_app_sms"
        f" WHERE par_id='{par_id}';")
    usagestats_query = (
        f"SELECT last_time_active, time_active_in_msecs, package, source "
        f"FROM lv1_os_and_basic_app_usagestats_0 WHERE par_id='{par_id}';")
    chrome_download_query = (
        f"SELECT start_time, end_time, download_tab_url, download_path, file_name, os_account, chrome_profile "
        f"FROM lv1_app_web_chrome_download WHERE par_id='{par_id}';")
    chromium_edge_download_query = (
        f"SELECT start_time, end_time, download_tab_url, download_path, file_name, os_account, edge_profile "
        f"FROM lv1_app_web_chromium_edge_download WHERE par_id='{par_id}';")
    whale_download_query = (
        f"SELECT start_time, end_time, download_tab_url, download_path, file_name, os_account, whale_profile "
        f"FROM lv1_app_web_whale_download WHERE par_id='{par_id}';")
    opera_download_query = (
        f"SELECT start_time, end_time, download_tab_url, download_path, file_name, os_account, opera_profile "
        f"FROM lv1_app_web_opera_download WHERE par_id='{par_id}';")
    firefox_download_query = (
        f"SELECT start_time, end_time, url, download_path, os_account, firefox_profile_id "
        f"FROM lv1_app_web_firefox_download WHERE par_id='{par_id}';")
    reg_usb_query = (
        f"SELECT last_insertion_time, source_location, friendly_name, serial_number, device_description, last_assigned_drive_letter "
        f"FROM lv1_os_win_reg_usb_device WHERE par_id='{par_id}';")
    evt_usb_query = (
        f"SELECT time, manufacturer, model, serial_number, event_id, event_id_description, source "
        f"FROM lv1_os_win_event_logs_usb_devices WHERE par_id='{par_id}';")

    return [
        ('lv1_os_and_basic_app_call_logs', call_log_query),
        ('lv1_os_and_basic_app_mms', mms_query),
        ('lv1_os_and_basic_app_sms', sms_query),
        ('lv1_os_and_basic_app_usagestats_0', usagestats_query),
        ('lv1_app_web_chrome_download', chrome_download_query),
        ('lv1_app_web_chromium_edge_download', chromium_edge_download_query),
        ('lv1_app_web_whale_download', whale_download_query),
        ('lv1_app_web_opera_download', opera_download_query),
        ('lv1_app_web_firefox_download', firefox_download_query),
        ('lv1_os_win_reg_usb_device', reg_usb_query),
        ('lv1_os_win_event_logs_usb_devices', evt_usb_query),
    ]

def _BuildTimelineRows(self, table_name, result, par_id, configuration):
    """Convert the raw rows of one lv1 source table into lv2_timeline tuples.

    Args:
        table_name: name of the lv1 source table the rows came from.
        result: sequence of raw DB rows from that table's SELECT.
        par_id: partition identifier for the output rows.
        configuration: supplies case_id and evidence_id.

    Returns:
        List of 7-tuples matching the lv2_timeline column order.
    """
    # Browser-download tables share one row layout; the value is the
    # (browser label, profile label) pair used in the description text.
    browser_tables = {
        'lv1_app_web_chrome_download': ('Chrome', 'chrome profile'),
        'lv1_app_web_chromium_edge_download': ('Chromium Edge', 'edge profile'),
        'lv1_app_web_whale_download': ('Whale', 'whale profile'),
        'lv1_app_web_opera_download': ('Opera', 'opera profile'),
        'lv1_app_web_firefox_download': ('Firefox', 'firefox profile'),
    }
    prefix = (par_id, configuration.case_id, configuration.evidence_id)
    rows = []

    if table_name == 'lv1_os_and_basic_app_call_logs':
        for value in result:
            description = "from:%s, to:%s, call_type:%s" % (value[2], value[3], value[4])
            rows.append(prefix + ('Call', str(value[0]),
                                  self._convert_secs(int(value[1])), description))
    elif table_name == 'lv1_os_and_basic_app_mms':
        for value in result:
            description = "from:%s, to:%s, content:%s" % (value[8], value[9], value[12])
            rows.append(prefix + ('Message(MMS)', str(value[5]), "00:00:00", description))
    elif table_name == 'lv1_os_and_basic_app_sms':
        for value in result:
            # TODO: confirm the service_center column's meaning and fold it
            # into the missing from/to side.
            if value[3] == 'Sent':
                from_num, to_num = '', value[1]
            else:
                from_num, to_num = value[1], ''
            description = "from:%s, to:%s, content:%s" % (from_num, to_num, value[2])
            rows.append(prefix + ('Message(SMS)', str(value[0]), "00:00:00", description))
    elif table_name == 'lv1_os_and_basic_app_usagestats_0':
        for value in result:
            description = "package:%s, source:%s" % (value[2], value[3])
            rows.append(prefix + ('Android App', str(value[0]),
                                  self._convert_millisecs(value[1]), description))
    elif table_name in browser_tables:
        browser, profile_label = browser_tables[table_name]
        # Firefox stores the full path in one column and shifts the
        # account/profile columns left by one relative to the others.
        is_firefox = table_name == 'lv1_app_web_firefox_download'
        for value in result:
            duration = self._DownloadDuration(value[0], value[1])
            if is_firefox:
                file_path, os_account, profile = value[3], value[4], value[5]
            else:
                file_path, os_account, profile = value[3] + value[4], value[5], value[6]
            description = "browser:%s, source:%s, file_path:%s, os_account:%s, %s:%s" % (
                browser, value[2], file_path, os_account, profile_label, profile)
            rows.append(prefix + ('Web Download', str(value[0]), duration, description))
    elif table_name == 'lv1_os_win_reg_usb_device':
        for value in result:
            description = ("source:%s, friendly_name:%s, serial_number:%s, "
                           "device_description:%s, last_assigned_drive_letter:%s"
                           % (value[1], value[2], value[3], value[4], value[5]))
            rows.append(prefix + ('USB Connection', str(value[0]), "00:00:00", description))
    elif table_name == 'lv1_os_win_event_logs_usb_devices':
        for value in result:
            description = ("manufacturer:%s, model:%s, serial_number:%s, event_id:%s, "
                           "event_id_description:%s, source:%s"
                           % (value[1], value[2], value[3], value[4], value[5], value[6]))
            rows.append(prefix + ('USB Connection', str(value[0]), "00:00:00", description))
    return rows

def _DownloadDuration(self, start_value, end_value):
    """Return end-start of two ISO timestamps as text, "00:00:00" when no end.

    Timestamps look like '...T HH:MM:SS.ffffff+00:00'; the trailing 6-char
    UTC offset is stripped before parsing.

    Fix: the original called len() on the end value, which raised TypeError
    for a NULL (None) end_time; any falsy end now yields zero duration.
    """
    if not end_value:
        return "00:00:00"
    start_time = datetime.datetime.strptime(start_value[:-6], '%Y-%m-%dT%H:%M:%S.%f')
    end_time = datetime.datetime.strptime(end_value[:-6], '%Y-%m-%dT%H:%M:%S.%f')
    return str(end_time - start_time)