def ParseImage(self, options): ''' Module to analyze the image file. This module parse the partition list in image file. ''' # Get Partition list in image file disk_scanner = scan_disk.DiskScanner() disk_info = disk_scanner.Analyze(self.path) # Insert Partition info db = carpe_db.Mariadb() db.open() for disk in disk_info: par_id = 'p1' + str(uuid.uuid4()).replace('-', '') par_name = str(disk['vol_name']) par_type = str(disk['type_indicator']) sector_size = str(disk['bytes_per_sector']) par_size = str(disk['length']) start_sector = str(disk['start_sector']) if par_type == 'VSHADOW' and options['vss'] != 'True': continue else: query = "INSERT INTO partition_info(par_id, par_name, evd_id, par_type, sector_size, par_size, start_sector) VALUES('" + par_id + "', '" + par_name + "', '" + self.evd_id + "', '" + par_type + "', '" + sector_size + "', '" + par_size + "', '" + start_sector + "');" db.execute_query(query) db.close() # Split VSS Partition if options['vss'] == 'True': output_writer = split_disk.FileOutputWriter(self.path) disk_spliter = split_disk.DiskSpliter(disk_info) disk_spliter.SplitDisk(output_writer)
def print_directory_entry(self, directory_entry, prefix="", path=None):
    """Record one filesystem entry (per printable attribute) into the file table.

    Args:
        directory_entry: pytsk3 directory entry to record.
        prefix: accepted for caller compatibility; not used in this body.
        path: list of parent-directory path components, joined with '/'.
    """
    meta = directory_entry.info.meta
    name = directory_entry.info.name
    name_type = "-"
    if name:
        name_type = self.FILE_TYPE_LOOKUP.get(int(name.type), "-")
    meta_type = "-"
    if meta:
        meta_type = self.META_TYPE_LOOKUP.get(int(meta.type), "-")
    # e.g. "r/r" for a regular file, "d/d" for a directory.
    directory_entry_type = "{0:s}/{1:s}".format(name_type, meta_type)
    for attribute in directory_entry:
        # NOTE(review): .meta is dereferenced unconditionally here although it
        # was guarded with `if meta:` above — an entry without metadata would
        # raise AttributeError; confirm whether that can occur for callers.
        mtime = directory_entry.info.meta.mtime
        atime = directory_entry.info.meta.atime
        ctime = directory_entry.info.meta.crtime
        inode_type = int(attribute.info.type)
        if inode_type in self.ATTRIBUTE_TYPES_TO_PRINT:
            if self._fs_info.info.ftype in [
                    pytsk3.TSK_FS_TYPE_NTFS, pytsk3.TSK_FS_TYPE_NTFS_DETECT
            ]:
                # NTFS identifies an attribute as an addr-type-id triple.
                inode = "{0:d}-{1:d}-{2:d}".format(
                    meta.addr, int(attribute.info.type), attribute.info.id)
            else:
                inode = "{0:d}".format(meta.addr)
            attribute_name = attribute.info.name
            if attribute_name and attribute_name not in ["$Data", "$I30"]:
                #filename = name.name+":"+attribute.info.name
                # Named alternate data stream: "<name>:<stream>".
                filename = "{0:s}:{1:s}".format(
                    (name.name).decode('utf-8', 'replace'),
                    (attribute.info.name).decode('utf-8', 'replace'))
            else:
                filename = (name.name).decode('utf-8', 'replace')
            temp = str(filename)
            temp = temp.split(".")
            # Directories and names without a dot get an empty extension.
            if (len(temp) == 1 or (str(directory_entry_type) == "d/d")):
                file_extension = ""
            else:
                file_extension = temp[-1]
            if meta and name:
                # Pipe-separated record consumed by query_builder below.
                data = "{0:s}|{1:s}|{2:s}|{3:s}|{4:s}|{5:s}|{6:s}|{7:s}".format(
                    str(directory_entry_type), str(inode),
                    str("root/" + "/".join(path)), str(filename), str(mtime),
                    str(atime), str(ctime), str(file_extension))
                # NOTE(review): a new DB connection is opened per attribute —
                # consider hoisting the connection out of the loop.
                db_test = carpe_db.Mariadb()
                conn = db_test.open()
                query = db_test.query_builder("1", data, "file")
                data = db_test.execute_query(conn, query)
                db_test.close(conn)
def Parse(self, case_id, evd_id): """ Analyzes records related to Windows operating system installation information in Psort result. """ # Check Table if not self.CheckTable(): self.CreateTable() # Connect Database db = carpe_db.Mariadb() db.open() # Parse Data query = "SELECT par_id FROM partition_info WHERE evd_id='" + evd_id + "';" par_list = db.execute_query_mul(query) for par in par_list: for art in self.ARTIFACTS: name = art['Name'] desc = art['Desc'] values = art['Values'] for value in values: val_type = value[0] val_data = value[1] query = "SELECT description, filename FROM log2timeline WHERE par_id='" + par[ 0] + "' and " + val_type + "='" + val_data + "';" results = db.execute_query_mul(query) if len(results) == 0: continue else: for result in results: data = (result[0].decode('utf8')).split('|`') _data = [] _data.append(par[0]) # par_id _data.append(case_id) # case_id _data.append(evd_id) # evd_id _data.append('Windows') # os_type _data.append(data[1].split(':')[1]) # product_name _data.append(data[3].split(':')[1]) # os_version _data.append( data[4].split(':')[1]) # build_version _data.append(data[5].split(':')[1]) # product_id _data.append(data[2].split(':')[1]) # release_id _data.append(data[6].split(':')[1]) # owner _data.append(data[7].split(':')[1]) # install_date _data.append(data[0][1:-1]) # source query = self.GetQuery('I', _data) db.execute_query(query) db.close()
def Parse(self, case_id, evd_id): """ Analyzes records related to Web Browser Download History in Psort result. """ # Check Table if not self.CheckTable(): self.CreateTable() db = carpe_db.Mariadb() db.open() query = "SELECT par_id FROM partition_info WHERE evd_id='" + evd_id + "';" par_list = db.execute_query_mul(query) for par in par_list: for art in self.ARTIFACTS: name = art['Name'] desc = art['Desc'] values = art['values'] if name == "Chrome Family": chromium_list = ["Chrome", "Opera", "Edge"] (source, sourcetype, timetype) = values[0] for chromium in chromium_list: query = """ SELECT sourcetype, type, datetime, description FROM log2timeline WHERE par_id = '%s' AND source = '%s' AND sourcetype = '%s' AND type = '%s' AND filename LIKE '%%%s%%' """ % (par[0], source, sourcetype, timetype, chromium) result = db.execute_query_mul(query) if result == -1: break for _sourcetype, _type, _datetime, _description in result: down_time = _datetime.strftime('%Y-%m-%d %H:%M:%S') browser_type = chromium time_type = "Download Time" (down_url, save_path, file_size) = self.GetChromiumDownload(_description.decode('utf-8')) insert_values = (par[0], case_id, evd_id, browser_type, time_type, down_time, down_url, save_path.replace("\\", "/"), file_size) query = self.InsertQuery(db, insert_values) db.execute_query(query) db.close()
def Parse(self, case_id, evd_id): """ Analyzes records related to Windows External Storage List """ # Check Table if not self.CheckTable(): self.CreateTable() # Connect Database db = carpe_db.Mariadb() db.open() # Parse Data query = "SELECT par_id FROM partition_info WHERE evd_id='" + evd_id + "';" par_list = db.execute_query_mul(query) for par in par_list: for art in self.ARTIFACTS: name = art['Name'] desc = art['Desc'] values = art['Values'] for value in values: val_type = value[0] val_data = value[1] query = "SELECT description, filename FROM log2timeline WHERE par_id='" + par[ 0] + "' and " + val_type + "='" + val_data + "';" results = db.execute_query_mul(query) if len(results) == 0: continue else: for result in results: data = (result[0].decode('utf8')).split('|`') _data = [] _data.append(par[0]) _data.append(case_id) _data.append(evd_id) _data.append(data[0].split(':')[1]) _data.append(data[1].split(':')[1]) _data.append(data[2].split(':')[1]) _data.append(data[3].split(':')[1]) _data.append(data[4].split(':')[1]) _data.append(result[1]) query = self.GetQuery('I', _data) db.execute_query(query) db.close()
def FileSystem_Analysis(self, case_no, evd_no, user_id):
    """Run the filesystem analyzer over every splitted image of the evidence."""
    # Connect Carpe Database
    db = carpe_db.Mariadb()
    db.open()
    # Get image file list
    query = 'SELECT file_path FROM tn_evidence_splitted WHERE case_no = ' + str(case_no) + ' and evd_no = ' + str(evd_no) + ';'
    image_list = db.execute_query(db._conn, query)
    db.close()
    # Temporary code
    for image in image_list:
        # NOTE(review): 'option' is a placeholder argument — the real
        # carpe_fls command line has not been wired up yet; confirm.
        subprocess.call(['python', '../filesystem_analyzer/carpe_fls', 'option'])
    # NOTE(review): the triple-quote below opens a string that comments out
    # code continuing beyond this view — TODO confirm it is closed later.
    '''
def SysLogAndUserData_Analysis(self, case_no, evd_no, inv_no):
    """Run plaso (log2timeline, then psort) over every splitted image.

    Args:
        case_no: case number used to select rows in tn_evidence_splitted.
        evd_no: evidence number used in the same query.
        inv_no: investigator number (currently unused here).
    """
    # Connect Carpe Database
    db = carpe_db.Mariadb()
    db.open()
    # Get image file list
    query = 'SELECT file_name, file_path FROM tn_evidence_splitted WHERE case_no = ' + str(case_no) + ' and evd_no = ' + str(evd_no) + ';'
    image_name, image_list = db.execute_query(db._conn, query)
    db.close()
    # BUG FIX: the original `for name, image in image_name, image_list:` iterated
    # over the 2-tuple (image_name, image_list) itself — unpacking each sequence
    # as (name, image) — instead of pairing names with paths. zip() pairs each
    # file name with its corresponding path.
    for name, image in zip(image_name, image_list):
        subprocess.call(['python3.6', '../plaso_tool/log2timeline.py', name + '.plaso', image])
    # Export each resulting plaso storage into the 4n6time MariaDB backend.
    for name in image_name:
        subprocess.call(['python3.6', '../plaso_tool/psort.py', '-o', '4n6time_mariadb', name + '.plaso'])
def init_module(self, case_no, evd_no, inv_no):
    """Load evidence/case names from the DB and compute the split-image path.

    Sets self.evd_name, self.src_path, self.case_name and self.dst_path.
    """
    # Connect Carpe Database
    db = carpe_db.Mariadb()
    db.open()
    # Get Source Path
    query = 'SELECT evd_name, file_path FROM tn_evidence WHERE case_no = ' + str(case_no) + ' and evd_no = ' + str(evd_no) + ';'
    (self.evd_name, self.src_path) = db.execute_query(db._conn, query)
    # Get Case & Evidence Name
    query = 'SELECT case_name FROM tn_case WHERE case_no = ' + str(case_no) + ';'
    # NOTE(review): execute_query presumably returns a row tuple — if so this
    # assigns a tuple, and the string concatenation below would fail; confirm.
    self.case_name = db.execute_query(db._conn, query)
    db.close()
    # Create directory to store splitted image
    self.dst_path = '/data/share/image' + '/' + self.case_name + '/' + self.evd_name + '/splitted'
def Analyze(self):
    """Run every registered parser module over the partitions of this
    evidence that actually have log2timeline rows.
    """
    db = carpe_db.Mariadb()
    db.open()
    partitions = db.execute_query_mul(
        "SELECT par_id FROM partition_info WHERE evd_id='" + self.evd_id + "';")
    # Keep only partitions with at least one timeline record.
    populated = []
    for partition in partitions:
        count_query = ("SELECT count(*) FROM log2timeline WHERE par_id LIKE '%"
                       + partition[0] + "%';")
        if db.execute_query_mul(count_query)[0][0] != 0:
            populated.append(partition)
    db.close()
    # Dispatch every parser module on the filtered partition list.
    for module in self.ParserModuleObject.values():
        module.Parse(self.case_id, self.evd_id, populated)
def SetModule(self, _case_id, _evd_id):
    """Resolve the evidence image path and prepare the per-evidence temp dir.

    Sets self.case_id, self.evd_id, self.path and self.tmp_path.

    Args:
        _case_id: case identifier used to look up the evidence path.
        _evd_id: evidence identifier used in the same lookup.
    """
    self.case_id = _case_id
    self.evd_id = _evd_id
    db = carpe_db.Mariadb()
    db.open()
    # NOTE(review): SQL built by string concatenation — parameterize if the
    # carpe_db API supports it.
    query = "SELECT evd_path FROM evidence_info WHERE case_id='" + _case_id + "' AND evd_id='" + _evd_id + "';"
    self.path = os.path.join("/home/carpe/storage", db.execute_query(query)[0])
    db.close()
    # os.makedirs(..., exist_ok=True) replaces the original isdir()/mkdir()
    # pairs: it creates parent and child in one race-free call and is a no-op
    # when the directories already exist.
    self.tmp_path = os.path.join("/home/carpe/tmp", self.case_id, self.evd_id)
    os.makedirs(self.tmp_path, exist_ok=True)
def Carving(self, option):
    """Run the data-carving module over every partition of this evidence."""
    manage = CarvingManager(debug=True, out="carving.log", table="carving_result")
    res = manage.execute(manage.Instruction.LOAD_MODULE)
    # LOAD_MODULE failure aborts the whole carving run.
    if (res == False):
        return manage.Return.EIOCTL
    manage.execute(manage.Instruction.POLICY, {
        "enable": True,
        "save": True
    })
    db = carpe_db.Mariadb()
    db.open()
    manage.carpe_connect_master(db)
    # Get image file list
    query = "SELECT par_id, sector_size, start_sector, par_size FROM partition_info WHERE evd_id='" + self.evd_id + "' ORDER BY start_sector;"
    par_infos = db.execute_query_mul(query)
    for par_info in par_infos:
        # Per-partition output directory under the evidence temp path.
        desti = self.tmp_path + os.sep + par_info[0]
        if (os.path.exists(desti) == False):
            os.mkdir(desti)
        manage.execute(
            manage.Instruction.PARAMETER, {
                "p_id": par_info[0],
                "block": 8 * int(par_info[1]),
                "sector": par_info[1],
                "start": par_info[2],
                "path": self.path,
                "dest": desti + "{0}data_carving".format(os.sep),
                "end": par_info[3]
            })
        # NOTE(review): EXEC is assumed to run once per partition while
        # EXPORT/DISCONNECT run once at the end — confirm loop boundaries.
        manage.execute(manage.Instruction.EXEC)
    manage.execute(manage.Instruction.EXPORT_CACHE_TO_DB)
    manage.execute(manage.Instruction.DISCONNECT_DB)
def ParseFilesystem(self):
    """Open the evidence image and record per-partition filesystem metadata.

    Returns:
        True on completion (partitions whose filesystem cannot be opened are
        skipped, not treated as failures).
    """
    fs = carpe_fs_analyzer.CARPE_FS_Analyze()
    db = carpe_db.Mariadb()
    db.open()
    query = "SELECT sub_type FROM evidence_info WHERE evd_id='" + self.evd_id + "';"
    image_format = str(db.execute_query(query)[0]).lower()
    fs.open_image(image_format, self.path)
    query = "SELECT par_id, sector_size, start_sector FROM partition_info WHERE evd_id='" + self.evd_id + "';"
    par_info = db.execute_query_mul(query)
    for par in par_info:
        par_id = str(par[0])
        #print(par_id)
        sector_size = int(str(par[1]))
        start_sector = int(str(par[2]))
        # Partition byte offset = sector size * start sector.
        ret = fs.open_file_system((sector_size * start_sector))
        if ret == -1:
            print(par_id)
            continue
        fs.fs_info(par_id)
        #unalloc
        fs_alloc_info = fs.block_alloc_status()
        fs_alloc_info._p_id = par_id
        # Record every unallocated block for this partition.
        for i in fs_alloc_info._unallock_blocks:
            query = db.insert_query_builder("block_info")
            query = (query + " values ('" + str(i[1]) + "','" + par_id + "','" + str(i[0]) + "');")
            db.execute_query(query)
        db.commit()
        # Walk the directory tree from the filesystem root.
        directory = fs.open_directory(None)
        fs.list_directory(directory, [], [], db)
    db.close()
    return True
def SysLogAndUserData_Analysis(self):
    """Run plaso (carpe_l2t + carpe_psort) for every partition of this evidence."""
    # connect CARPE Database
    db = carpe_db.Mariadb()
    db.open()
    # Get image file list
    query = "SELECT par_id, par_name FROM partition_info WHERE evd_id='" + self.evd_id + "' ORDER BY start_sector;"
    par_infos = db.execute_query_mul(query)
    db.close()
    # Call Log2Timeline & Psort Tool
    for par_info in par_infos:
        p_id = str(par_info[0])
        p_name = str(par_info[1])
        # One .plaso storage file per partition in the evidence temp dir.
        storage_path = os.path.join(self.tmp_path, (p_id + ".plaso"))
        print(p_id)
        # HACK: hard-coded skip of one partition left over from a test run —
        # remove before production use.
        if p_id == 'p194e18781ce5b4e94a90d4591e3264b5a':
            print("pass!!!")
            continue  # test
        subprocess.call([
            'python3.6', '/home/barley/CARPE/plaso_tool/carpe_l2t.py',
            '--no_vss', '--hashers', 'None', storage_path, self.path, p_name
        ])
        print('l2t end')
        if os.path.exists(storage_path):
            # NOTE(review): database credentials are hard-coded on the command
            # line (visible in the process list) — move them to configuration.
            subprocess.call([
                'python3.6', '/home/barley/CARPE/plaso_tool/carpe_psort.py',
                '-o', '4n6time_maria', '--server', '127.0.0.1', '--port',
                '3306', '--user', 'root', '--password', 'dfrc4738',
                '--db_name', 'carpe', '--case_id', str(self.case_id),
                '--evd_id', str(self.evd_id), '--par_id', p_id, storage_path
            ])
def Parse(self, case_id, evd_id, par_list):
    """
    Analyzes Windows user-account records (SAM / ProfileList registry data)
    in the Psort result and inserts one row per discovered account.

    Args:
        case_id: case identifier copied into every inserted row.
        evd_id: evidence identifier copied into every inserted row.
        par_list: sequence of partition rows; par[0] is the par_id.
    """
    # Check Table
    if not self.CheckTable():
        self.CreateTable()
    db = carpe_db.Mariadb()
    db.open()
    '''
    query = "SELECT par_id FROM partition_info WHERE evd_id='" + evd_id + "';"
    par_list = db.execute_query_mul(query)
    '''
    for par in par_list:
        # Only process partitions that actually have timeline rows.
        query = r"SELECT count(*) FROM log2timeline WHERE par_id LIKE '%" + par[0] + "%'"
        if db.execute_query_mul(query)[0][0] != 0:
            for art in self.ARTIFACTS:
                name = art['Name']
                desc = art['Desc']
                values = art['Values']
                if name == "USERACCOUNTS":
                    # TODO : Fix a Chorme Cache plugin -> lack information
                    user_list = []
                    user_count = 0
                    #Select Default OS
                    query = r"SELECT sourcetype, type, user, description, datetime FROM log2timeline WHERE sourcetype like '%User Account Information%' or description like '%Microsoft#\\Windows NT#\\CurrentVersion#\\ProfileList%' escape '#' and filename not like '%SafeOS%'"
                    result_query = db.execute_query_mul(query)
                    try:
                        for result_data in result_query:
                            # description fields are '|`'-separated; flatten to
                            # spaces so one regex can span them.
                            result_data_sep = result_data[3].decode(
                                'utf-8').replace(r"|`", " ")
                            user_information = User_Information()
                            if 'User Account Information' in str(
                                    result_data[0]
                            ) and str(
                                    result_data[1]
                            ) == 'Content Modification Time' and str(
                                    result_data[2]) is not None:
                                user_list.append(user_information)
                                user_list[user_count].type_of_user = '******'
                                if 'Comments' in result_data_sep:
                                    dataInside = r"Username: (.*) Comments: (.*) RID: ([\d]*) Login count: ([\d]*)"
                                    m = re.search(dataInside, result_data_sep)
                                    user_list[user_count].user_name = m.group(1)
                                    user_list[user_count].account_description = m.group(2)
                                    user_list[user_count].security_identifier = m.group(3)
                                    user_list[user_count].login_count = m.group(4)
                                    user_count = user_count + 1
                                else:
                                    if 'Full name' in result_data_sep:
                                        dataInside = r"Username: (.*) Full name: (.*) RID: ([\d]*) Login count: ([\d]*)"
                                        m = re.search(dataInside, result_data_sep)
                                        user_list[user_count].user_name = m.group(1)
                                        user_list[user_count].full_name = m.group(2)
                                        user_list[user_count].security_identifier = m.group(3)
                                        user_list[user_count].login_count = m.group(4)
                                        user_count = user_count + 1
                                    else:
                                        dataInside = r"Username: (.*) RID: ([\d]*) Login count: ([\d]*)"
                                        m = re.search(dataInside, result_data_sep)
                                        user_list[user_count].user_name = m.group(1)
                                        user_list[user_count].security_identifier = m.group(2)
                                        user_list[user_count].login_count = m.group(3)
                                        user_count = user_count + 1
                            # Well-known service accounts from ProfileList.
                            if 'S-1-5-18' in result_data_sep:
                                user_list.append(user_information)
                                user_list[user_count].security_identifier = 'S-1-5-18'
                                dataInside = r"ProfileImagePath: \[REG_EXPAND_SZ\] (.*) RefCount:"
                                m = re.search(dataInside, result_data_sep)
                                user_list[user_count].profile_path = m.group(1)
                                user_list[user_count].type_of_user = '******'
                                user_count = user_count + 1
                            if 'S-1-5-19' in result_data_sep:
                                user_list.append(user_information)
                                user_list[user_count].security_identifier = 'S-1-5-19'
                                dataInside = r"ProfileImagePath: \[REG_EXPAND_SZ\] (.*) State:"
                                m = re.search(dataInside, result_data_sep)
                                user_list[user_count].profile_path = m.group(1)
                                user_list[user_count].type_of_user = '******'
                                user_count = user_count + 1
                            if 'S-1-5-20' in result_data_sep:
                                user_list.append(user_information)
                                # NOTE(review): attribute is capitalized here
                                # (Security_Identifier) unlike everywhere else —
                                # likely a typo; confirm before fixing.
                                user_list[user_count].Security_Identifier = 'S-1-5-20'
                                dataInside = r"ProfileImagePath: \[REG_EXPAND_SZ\] (.*) State:"
                                m = re.search(dataInside, result_data_sep)
                                user_list[user_count].profile_path = m.group(1)
                                user_list[user_count].type_of_user = '******'
                                user_count = user_count + 1
                    except:
                        # NOTE(review): bare except hides parsing errors.
                        print("MAX-USERACCOUNT-error")
                    # Second pass: enrich accounts with login/password times
                    # and real-user (S-1-5-21) SIDs and profile paths.
                    for result_data in result_query:
                        if 'Last Login Time' == result_data[1]:
                            for user in user_list:
                                if result_data[2] == user.user_name:
                                    user.last_login_time = result_data[4]
                        if 'Last Password Reset' == result_data[1]:
                            for user in user_list:
                                if result_data[2] == user.user_name:
                                    user.last_password_change_time = result_data[4]
                        if 'S-1-5-21' in result_data[3].decode('utf-8'):
                            dataInside = r"ProfileList\\(.*)\] Flags:(.*)ProfileImagePath: \[REG_EXPAND_SZ\] (.*)\\(.*) ProfileLoadTimeHigh:"
                            m = re.search(dataInside, result_data[3].decode('utf-8'))
                            for user in user_list:
                                if m.group(4) == user.user_name:
                                    user.security_identifier = m.group(1)
                                    user.profile_path = m.group(3) + "\\" + m.group(4)
                    for user in user_list:
                        insert_values = (
                            par[0], case_id, evd_id, str(user.user_name),
                            str(user.full_name), str(user.type_of_user),
                            str(user.account_description),
                            str(user.security_identifier),
                            str(user.user_group), str(user.login_script),
                            str(user.profile_path), str(user.last_login_time),
                            str(user.last_password_change_time),
                            str(user.last_incorrect_password_login_time),
                            str(user.login_count), str(user.account_disabled),
                            str(user.password_required),
                            str(user.password_hint), str(user.lm_hash),
                            str(user.ntlm_hash))
                        self.InsertQuery(db, insert_values)
    db.close()
    now = datetime.now()
    print('[%s-%s-%s %s:%s:%s] USER ACCOUNTS DONE' %
          (now.year, now.month, now.day, now.hour, now.minute, now.second))
def Parse(self, case_id, evd_id): """ Analyzes records related to Web Browser Cache in Psort result. """ # Check Table if not self.CheckTable(): self.CreateTable() db = carpe_db.Mariadb() db.open() query = "SELECT par_id FROM partition_info WHERE evd_id='" + evd_id + "';" par_list = db.execute_query_mul(query) for par in par_list: for art in self.ARTIFACTS: name = art['Name'] desc = art['Desc'] values = art['Values'] if name == "Chrome Family": # TODO : Fix a Chorme Cache plugin -> lack information chromium_list = ["Chrome", "Opera", "Edge"] (source, sourcetype, timetype) = values[0] for chromium in chromium_list: query = """ SELECT sourcetype, type, datetime, description FROM log2timeline WHERE par_id = '%s' AND source = '%s' AND sourcetype = '%s' AND type = '%s' AND filename LIKE '%%%s%%' """ % (par[0], source, sourcetype, timetype, chromium) result = db.execute_query_mul(query) if result == -1: break for _sourcetype, _type, _datetime, _description in result: time = _datetime.strftime('%Y-%m-%d %H:%M:%S') browser_type = chromium time_type = _type result = self.GetChromiumCache( _description.decode()) if result == False: (cache_url, cache_name, cache_size, mime_type) = ("None", "None", "None", "None") else: (cache_url, cache_name, cache_size, mime_type) = (result, "None", "None", "None") insert_values = (par[0], case_id, evd_id, browser_type, time_type, time, cache_url, cache_name, cache_size, mime_type) self.InsertQuery(db, insert_values) # elif name == "Firefox": # # TODO : Fix a FireFox Cache plugin # # (source, sourcetype, timetype) = values[0] # # query = """ SELECT sourcetype, type, datetime, description # FROM log2timeline # WHERE par_id = '%s' AND # source = '%s' AND # sourcetype = '%s' AND # type = '%s' # """ % (par[0], source, sourcetype, timetype) # # result = db.execute_query_mul(query) # for _sourcetype, _type, _datetime, _description in result: # visit_time = _datetime # elif name == "Internet Explorer index.dat": # # TODO : Fix a Index.dat parser # 
(sourcetype, browser_artifact_type, format) = values[0] # # query = """ SELECT sourcetype, type, datetime, description # FROM log2timeline # WHERE par_id = '%s' AND # sourcetype = '%s' AND # filename LIKE '%%%s%%' AND # format = '%s' # """ % (par[0], sourcetype, browser_artifact_type, format) # print(query) # # result = db.execute_query_mul(query) # if result == -1: # break # # for _sourcetype, _type, _datetime, _description in result: # time = _datetime.strftime('%Y-%m-%d %H:%M:%S') # time_type = _type # browser_type = "Internet Explorer (~9)" # # #self.GetMSIECF(_description, _extra) # elif name == "Internet Explorer WebCacheV##.dat": # TODO : Description Parsing # pass db.close()
def Parse(self, case_id, evd_id):
    """
    Analyzes records related to Web Browser Cookies in Psort result.

    Args:
        case_id: case identifier copied into every inserted row.
        evd_id: evidence identifier used to look up partitions.
    """
    # Check Table
    if not self.CheckTable():
        self.CreateTable()
    db = carpe_db.Mariadb()
    db.open()
    query = "SELECT par_id FROM partition_info WHERE evd_id='" + evd_id + "';"
    par_list = db.execute_query_mul(query)
    for par in par_list:
        for art in self.ARTIFACTS:
            name = art['Name']
            desc = art['Desc']
            # NOTE(review): key is lowercase 'values' here while some sibling
            # parsers use 'Values' — confirm against this class's ARTIFACTS.
            values = art['values']
            if name == "Chrome Family":
                chromium_list = ["Chrome", "Opera", "Edge"]
                (source, sourcetype) = values[0]
                for chromium in chromium_list:
                    query = """ SELECT sourcetype, type, datetime, description, extra
                                FROM log2timeline
                                WHERE par_id = '%s' AND
                                      source = '%s' AND
                                      sourcetype = '%s' AND
                                      filename LIKE '%%%s%%'
                            """ % (par[0], source, sourcetype, chromium)
                    result = db.execute_query_mul(query)
                    # execute_query_mul returns -1 on failure.
                    if result == -1:
                        break
                    for _sourcetype, _type, _datetime, _description, _extra in result:
                        time = _datetime.strftime('%Y-%m-%d %H:%M:%S')
                        time_type = _type
                        browser_type = chromium
                        # Rebinding `result` is safe: the for loop holds its
                        # own iterator reference.
                        result = self.GetChromiumCookie(
                            _description.decode(), _extra)
                        (host_url, path, cookie_key, cookie_value) = result
                        insert_values = (par[0], case_id, evd_id, browser_type, time_type, time, host_url, path, cookie_key, cookie_value)
                        self.InsertQuery(db, insert_values)
            elif name == "Firefox":
                (source, sourcetype) = values[0]
                query = """ SELECT sourcetype, type, datetime, description, extra
                            FROM log2timeline
                            WHERE par_id = '%s' AND
                                  source = '%s' AND
                                  sourcetype = '%s'
                        """ % (par[0], source, sourcetype)
                result = db.execute_query_mul(query)
                if result == -1:
                    break
                for _sourcetype, _type, _datetime, _description, _extra in result:
                    time = _datetime.strftime('%Y-%m-%d %H:%M:%S')
                    time_type = _type
                    browser_type = "Firefox"
                    # Firefox cookie descriptions share the Chromium layout,
                    # so the same extractor is reused here.
                    result = self.GetChromiumCookie(
                        _description.decode(), _extra)
                    (host_url, path, cookie_key, cookie_value) = result
                    insert_values = (par[0], case_id, evd_id, browser_type, time_type, time, host_url, path, cookie_key, cookie_value)
                    self.InsertQuery(db, insert_values)
            elif name == "Internet Explorer index.dat":
                # TODO : Fix a Index.dat(Cookie) parser
                (sourcetype, browser_artifact_type, format) = values[0]
                query = """ SELECT sourcetype, type, datetime, description
                            FROM log2timeline
                            WHERE par_id = '%s' AND
                                  sourcetype = '%s' AND
                                  filename LIKE '%%%s%%' AND
                                  format = '%s'
                        """ % (par[0], sourcetype, browser_artifact_type, format)
                result = db.execute_query_mul(query)
                if result == -1:
                    break
                for _sourcetype, _type, _datetime, _description in result:
                    time = _datetime.strftime('%Y-%m-%d %H:%M:%S')
                    time_type = _type
                    browser_type = "Internet Explorer (~9)"
                    # index.dat only yields the host URL; other fields unknown.
                    (path, cookie_key, cookie_value) = ("None", "None", "None")
                    host_url = self.GetMSIECFCookie(_description.decode())
                    insert_values = (par[0], case_id, evd_id, browser_type, time_type, time, host_url, path, cookie_key, cookie_value)
                    self.InsertQuery(db, insert_values)
            # elif name == "Internet Explorer WebCacheV##.dat": # TODO : Description Parsing
            #     pass
    db.close()
def Analyze_Documents(self):
    """Extract document files (pdf/office/hwp) per partition and hand them to
    the document-analysis daemon.

    Reads case/evidence metadata from the DB, prepares the per-partition work
    directories under /home/carpe/defa_temp, exports the matching files from
    the image and launches P3_Manager on the result.
    """
    # connect Carpe Database
    db = carpe_db.Mariadb()
    db.open()
    data = MappingDocuments.MappingDocuments()
    # Get Case Information
    data.case_id = self.case_id
    query = "SELECT case_name FROM case_info where case_id ='%s';" % self.case_id
    case = db.execute_query(query)
    data.case_name = case[0]
    # Get Evidence Information
    query = "SELECT * FROM evidence_info where case_id='" + self.case_id + "' and evd_id='" + self.evd_id + "';"
    evidence = db.execute_query(query)
    data.evdnc_id = evidence[0]
    data.evdnc_name = evidence[1]
    evdnc_path = self.path
    # NOTE(review): column 11 is assumed to be the sha1 hash — confirm schema.
    data.sha1_hash = evidence[11]
    # ole object save path: <base>/<case>/<evidence>[/<partition>/documents/]
    data.work_dir = "/home/carpe/defa_temp" + os.sep + self.case_id
    if not os.path.exists(data.work_dir):
        os.mkdir(data.work_dir)
    data.work_dir = "/home/carpe/defa_temp" + os.sep + self.case_id + os.sep + self.evd_id
    if not os.path.exists(data.work_dir):
        os.mkdir(data.work_dir)
    # Get Partition List
    query = "SELECT par_id, sector_size, start_sector, par_size FROM partition_info WHERE evd_id='" + self.evd_id + "' ORDER BY start_sector;"
    par_info = db.execute_query_mul(query)
    for par in par_info:
        # Get Document List
        query = "SELECT * FROM file_info WHERE extension in ('pdf', 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx', 'hwp') and par_id='" + par[0] + "' ORDER BY file_id;"
        document_files = db.execute_query_mul(query)
        if len(document_files):
            data.work_dir = "/home/carpe/defa_temp" + os.sep + self.case_id + os.sep + self.evd_id + os.sep + par[0]
            if not os.path.exists(data.work_dir):
                os.mkdir(data.work_dir)
            data.work_dir = "/home/carpe/defa_temp" + os.sep + self.case_id + os.sep + self.evd_id + os.sep + par[0] + os.sep + "documents/"
            if not os.path.exists(data.work_dir):
                os.mkdir(data.work_dir)
            data.ole_path = "/home/carpe/defa_temp" + os.sep + self.case_id + os.sep + self.evd_id + os.sep + par[0] + os.sep + "documents/"
            file_list = [list(doc) for doc in document_files]
            # (file name, file id) pairs for the extractor; renamed from the
            # original local `tuple`, which shadowed the builtin.
            pair_list = []
            for idx in range(len(file_list)):
                # Append the destination path as an extra column.
                file_list[idx].append(data.work_dir + file_list[idx][4])
                pair_list.append([file_list[idx][4], file_list[idx][1]])
            file_exporter = carpe_file_extractor.Carpe_File_Extractor()
            # Partition byte offset = sector size * start sector.
            file_exporter.setConfig(data.work_dir, evdnc_path, (par[1] * par[2]), pair_list)
            file_exporter.extract()
            P3_Manager.run_daemon(data, file_list)
            print()
    db.close()
def Parse(self, case_id, evd_id, par_list):
    """
    Analyzes AmCache records (Root\\InventoryApplicationFile,
    Root\\InventoryApplication, Root\\File, Root\\Programs) in the Psort
    result and inserts one row per executable-file entry.

    Args:
        case_id: case identifier copied into every inserted row.
        evd_id: evidence identifier copied into every inserted row.
        par_list: sequence of partition rows; par[0] is the par_id.
    """
    # Check Table
    if not self.CheckTable():
        self.CreateTable()
    db = carpe_db.Mariadb()
    db.open()
    for par in par_list:
        for art in self.ARTIFACTS:
            name = art['Name']
            desc = art['Desc']
            values = art['Values']
            if name == "AMCACHE_FILE":
                # TODO : Fix a Chorme Cache plugin -> lack information
                file_list = []
                file_count = 0
                query = r"SELECT description, datetime FROM log2timeline WHERE description like '%Root#\\InventoryApplicationFile#\\%' escape '#' or description like '%Root#\\InventoryApplication#\\%' escape '#' or description like '%Root#\\File#\\%' escape '#' or description like '%Root#\\Programs#\\%' escape '#'"
                result_query = db.execute_query_mul(query)
                try:
                    for result_data in result_query:
                        # Current (Windows 10+) AmCache layout.
                        # Flatten '|`'-separated fields so one regex can span them.
                        result_data_sep = result_data[0].decode(
                            'utf-8').replace(r"|`", " ")
                        if 'Root\\InventoryApplicationFile\\' in result_data_sep and 'empty' not in result_data_sep:
                            file_information = File_Information()
                            file_list.append(file_information)
                            file_list[file_count].key_last_updated_time = result_data[1]
                            file_list[file_count].legacy_flag = 0
                            if 'BinFileVersion' in result_data_sep:
                                dataInside = r"\[\\Root\\InventoryApplicationFile\\(.*)\] BinFileVersion: \[REG_SZ\] (.*) BinProductVersion: \[REG_SZ\] (.*) BinaryType: \[REG_SZ\] (.*) FileId: \[REG_SZ\] (.*) IsOsComponent:"
                                m = re.search(dataInside, result_data_sep)
                                file_list[file_count].key_id = m.group(1)
                                file_list[file_count].binary_file_version = m.group(2)
                                file_list[file_count].binary_product_version = m.group(3)
                                file_list[file_count].binary_type = m.group(4)
                                file_list[file_count].sha1_hash = m.group(5)
                                if 'IsOsComponent' in result_data_sep:
                                    dataInside = r" IsOsComponent: \[REG_DWORD_LE\] ([\d])"
                                    m = re.search(dataInside, result_data_sep)
                                    if m.group(1) == '0':
                                        file_list[file_count].os_component_flag = 'False'
                                    else:
                                        file_list[file_count].os_component_flag = 'True'
                                if 'IsPeFile' in result_data_sep:
                                    dataInside = r" IsPeFile: \[REG_DWORD_LE\] ([\d])"
                                    m = re.search(dataInside, result_data_sep)
                                    if m.group(1) == '0':
                                        file_list[file_count].pe_file_flag = 'False'
                                    else:
                                        file_list[file_count].pe_file_flag = 'True'
                                if 'Language' in result_data_sep:
                                    dataInside = r" Language: \[REG_DWORD_LE\] ([\d]*)"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].language_code = m.group(1)
                                if 'LinkDate' in result_data_sep:
                                    dataInside = r" LinkDate: \[REG_SZ\] (.*) LongPathHash:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].link_date = m.group(1)
                                if 'LongPathHash' in result_data_sep:
                                    dataInside = r" LongPathHash: \[REG_SZ\] (.*) LowerCaseLongPath:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].long_path_hash = m.group(1)
                                if 'LowerCaseLongPath' in result_data_sep:
                                    dataInside = r" LowerCaseLongPath: \[REG_SZ\] (.*)\.(.*) Name:"
                                    m = re.search(dataInside, result_data_sep)
                                    # NOTE(review): overwrites long_path_hash
                                    # with the lower-case path — confirm intent.
                                    file_list[file_count].long_path_hash = m.group(1)
                                    file_list[file_count].full_path = m.group(1) + '.' + m.group(2)
                                    file_list[file_count].file_extension = m.group(2)
                                if 'ProductName' in result_data_sep:
                                    dataInside = r" ProductName: \[REG_SZ\] (.*) ProductVersion:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].product_name = m.group(1)
                                if 'ProductVersion' in result_data_sep:
                                    dataInside = r" ProductVersion: \[REG_SZ\] (.*) ProgramId:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].product_version = m.group(1)
                                if 'ProgramId' in result_data_sep:
                                    dataInside = r" ProgramId: \[REG_SZ\] (.*) Publisher:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].program_id = m.group(1)
                                if 'Publisher' in result_data_sep:
                                    dataInside = r" Publisher: \[REG_SZ\] (.*) Size:"
                                    m = re.search(dataInside, result_data_sep)
                                    # NOTE(review): stores the Publisher value
                                    # into .usn — almost certainly should be
                                    # .publisher; confirm before fixing.
                                    file_list[file_count].usn = m.group(1)
                                if 'Size' in result_data_sep:
                                    dataInside = r" Size: \[REG_QWORD\] (.*) Usn:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].size = m.group(1)
                                if 'Usn' in result_data_sep:
                                    dataInside = r" Usn: \[REG_QWORD\] (.*) Version:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].usn = m.group(1)
                                if 'Version' in result_data_sep:
                                    dataInside = r" Version: \[REG_SZ\] (.*)"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].version = m.group(1)
                            else:
                                # Entry without BinFileVersion: shorter layout.
                                dataInside = r"\[\\Root\\InventoryApplicationFile\\(.*)\] BinaryType: \[REG_SZ\] (.*) FileId: \[REG_SZ\] (.*) LongPathHash: \[REG_SZ\] (.*) LowerCaseLongPath: \[REG_SZ\] (.*)\.(.*) ProgramId: \[REG_SZ\] (.*) Size: \[REG_SZ\] (.*)"
                                m = re.search(dataInside, result_data_sep)
                                file_list[file_count].key_id = m.group(1)
                                file_list[file_count].binary_type = m.group(2)
                                file_list[file_count].sha1_hash = m.group(3)
                                file_list[file_count].long_path_hash = m.group(4)
                                file_list[file_count].full_path = m.group(5) + '.' + m.group(6)
                                file_list[file_count].file_extension = m.group(6)
                                file_list[file_count].program_id = m.group(7)
                                file_list[file_count].size = m.group(8)
                            # Resolve program name via InventoryApplication.
                            for result_data in result_query:
                                # NOTE(review): this loop rebinds result_data
                                # but still tests result_data_sep from the
                                # outer row — the lookup never advances; bug?
                                if '\\Root\\InventoryApplication\\' in result_data_sep and file_list[file_count].program_id in result_data_sep:
                                    dataInside = r"Name: \[REG_SZ\] (.*) OSVersionAtInstallTime:"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].program_name = m.group(1)
                                    break
                            file_count = file_count + 1
                        # Legaxy
                        # Legacy (Windows 7/8) Root\File AmCache layout.
                        if 'Root\\File\\' in result_data_sep and 'empty' not in result_data_sep:
                            try:
                                if ' 0:' in result_data_sep:
                                    file_information = File_Information()
                                    file_list.append(file_information)
                                    file_list[file_count].key_last_updated_time = result_data[1]
                                    file_list[file_count].legacy_flag = 1
                                    # Numbered registry values of the legacy layout.
                                    dataInside = r"\[\\Root\\File\\(.*)\\(.*) 0: \[REG_SZ\] (.*) 1: \[REG_SZ\] (.*) 10: (.*) 100: \[REG_SZ\] (.*) 101: \[REG_SZ\] (.*) 11: \[REG_QWORD\] (.*) 12: \[REG_QWORD\] (.*) 15: \[REG_SZ\] (.*)\\(.*)\.(.*) 16: (.*) 17: \[REG_QWORD\] (.*) 2: \[REG_SZ\] (.*) 3: \[REG_DWORD_LE\] (.*) 4: \[REG_QWORD\] (.*) 5: \[REG_SZ\] (.*) 6: \[REG_DWORD_LE\] (.*) 7: \[REG_DWORD_LE\] (.*) 8: \[REG_SZ\] (.*) 9: \[REG_DWORD_LE\] (.*) a: (.*) c: \[REG_SZ\] (.*) d: (.*) f: \[REG_DWORD_LE\] (.*)"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].volume_guid = m.group(1)
                                    file_list[file_count].key_id = m.group(1) + '\\' + m.group(2)
                                    file_list[file_count].product_name = m.group(3)
                                    file_list[file_count].publisher = m.group(4)
                                    file_list[file_count].program_id = m.group(6)
                                    file_list[file_count].sha1_hash = m.group(7)
                                    file_list[file_count].last_modified_time = m.group(8)
                                    file_list[file_count].created_time = m.group(9)
                                    file_list[file_count].full_path = m.group(10) + '\\' + m.group(11) + '.' + m.group(12)
                                    file_list[file_count].file_name = m.group(11) + '.' + m.group(12)
                                    file_list[file_count].file_extension = '.' + m.group(12)
                                    file_list[file_count].last_modified_2_time = m.group(14)
                                    file_list[file_count].product_version = m.group(15)
                                    file_list[file_count].language_code = m.group(16)
                                    file_list[file_count].switch_back_context = m.group(17)
                                    file_list[file_count].version = m.group(18)
                                    file_list[file_count].size = m.group(19)
                                    file_list[file_count].pe_header_field_size_of_image = m.group(20)
                                    file_list[file_count].pe_header_hash = m.group(21)
                                    file_list[file_count].pe_header_checksum = m.group(22)
                                    file_list[file_count].description = m.group(24)
                                    file_list[file_count].link_date = m.group(26)
                                    file_count = file_count + 1
                                if ' 0:' not in result_data_sep:
                                    file_information = File_Information()
                                    file_list.append(file_information)
                                    file_list[file_count].key_last_updated_time = result_data[1]
                                    file_list[file_count].legacy_flag = 1
                                    dataInside = r"\[\\Root\\File\\(.*)\\(.*) 100: \[REG_SZ\] (.*) 15: \[REG_SZ\] (.*)\\(.*)\.(.*) 17: \[REG_QWORD\] (.*)"
                                    m = re.search(dataInside, result_data_sep)
                                    file_list[file_count].volume_guid = m.group(1)
                                    file_list[file_count].key_id = m.group(1) + '\\' + m.group(2)
                                    file_list[file_count].program_id = m.group(3)
                                    file_list[file_count].full_path = m.group(4) + '\\' + m.group(5) + '.' + m.group(6)
                                    file_list[file_count].file_name = m.group(5) + '.' + m.group(6)
                                    file_list[file_count].file_extension = '.' + m.group(6)
                                    file_list[file_count].last_modified_2_time = m.group(7)
                                    # Resolve program name via Root\Programs.
                                    for result_data in result_query:
                                        # NOTE(review): same shadowed-variable
                                        # pattern as above — confirm.
                                        if '\\Root\\Programs\\' in result_data_sep and file_list[file_count].program_id in result_data_sep:
                                            dataInside = r" 0: \[REG_SZ\] (.*) 1:"
                                            m = re.search(dataInside, result_data_sep)
                                            file_list[file_count].program_name = m.group(1)
                                            break
                                    file_count = file_count + 1
                            except:
                                # NOTE(review): bare except hides parsing errors.
                                print("MAX-AMCAHCE-error")
                except:
                    print("MAX-AMCAHCE-error")
                for file in file_list:
                    insert_values = (
                        par[0], case_id, evd_id, str(file.file_name),
                        str(file.key_last_updated_time),
                        str(file.file_extension), str(file.program_name),
                        str(file.program_id), str(file.key_id),
                        str(file.sha1_hash), str(file.os_component_flag),
                        str(file.full_path), str(file.link_date),
                        str(file.product_name), str(file.size),
                        str(file.version), str(file.product_version),
                        str(file.long_path_hash), str(file.binary_type),
                        str(file.pe_file_flag), str(file.binary_file_version),
                        str(file.binary_product_version),
                        str(file.language_code), str(file.usn),
                        str(file.legacy_flag), str(file.volume_guid),
                        str(file.publisher), str(file.switch_back_context),
                        str(file.description), str(file.last_modified_time),
                        str(file.created_time), str(file.last_modified_2_time),
                        str(file.pe_header_field_size_of_image),
                        str(file.pe_header_hash), str(file.pe_header_checksum))
                    self.InsertQuery(db, insert_values)
    db.close()
    now = datetime.now()
    print('[%s-%s-%s %s:%s:%s] AmCache File DONE' %
          (now.year, now.month, now.day, now.hour, now.minute, now.second))
def Parse(self, case_id, evd_id, par_list):
    """Extract USB / portable-device connection history from psort (log2timeline) output.

    Parses registry-derived description strings for SCSI / WPDBUSENUM / USB /
    USBSTOR / STORAGE enumeration keys, correlates each device with the
    0064/0065/0066/0067 property-key timestamps (driver install, first
    install, last insertion, last removal), then inserts one row per device.

    Args:
        case_id: case identifier stored with every inserted row.
        evd_id: evidence identifier stored with every inserted row.
        par_list: iterable of partition rows; par[0] is the partition id.
    """
    # Make sure the destination table exists before inserting.
    if not self.CheckTable():
        self.CreateTable()
    db = carpe_db.Mariadb()
    db.open()
    for par in par_list:
        for art in self.ARTIFACTS:
            if art['Name'] != "USB_all_device":
                continue
            # Timestamp records (006x property keys) used later to attach
            # install / insertion / removal times to each device.
            query = r"SELECT description, datetime FROM log2timeline WHERE (description like '%0064%' or description like '%0065%' or description like '%0066%' or description like '%0067%') and (description like '%SCSI#\%' escape '#' or description like '%USB#\%' escape '#' or description like '%USBSTOR#\%' escape '#' or description like '%STORAGE#\%' escape '#' or description like '%WPDBUSENUM#\%' escape '#')"
            result_final = db.execute_query_mul(query)

            device_list = []
            device_count = 0

            # Device-enumeration records carrying a "Mfg:" (or label) value.
            query = "SELECT description, datetime FROM log2timeline WHERE (description like '%SCSI#\%' escape '#' or description like '%WPDBUSENUM#\%' escape '#' or description like '%USB#\%' escape '#' or description like '%USBSTOR#\%' escape '#' or description like '%STORAGE#\%' escape '#' or description like '%Windows Portable Devices#\\Devices#\\SWD%' escape '#') and description like '%Mfg:%' or description like '%USB#\%' escape '#' and description like '%label:%'"
            result = db.execute_query_mul(query)
            if len(result) < 1:
                break
            for result_data in result:
                device_information = Device_information()
                try:
                    text = result_data[0].decode('utf-8')  # decode once, reuse below
                    if 'SCSI\\' in text:
                        # "Address:" records use a slightly different layout than "Cap" records.
                        if 'Address:' not in text:
                            dataInside = r"SCSI\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) (.*) Driver: (.*)FriendlyName: \[REG_SZ\] (.*) Hard(.*) Mfg: \[REG_SZ\] (.*) Service:"
                        else:
                            dataInside = r"SCSI\\(.*)\\(.*)\] Address(.*) DeviceDesc: \[REG_SZ\] (.*) (.*) Driver: (.*)FriendlyName: \[REG_SZ\] (.*) Hard(.*) Mfg: \[REG_SZ\] (.*) Service:"
                        m = re.search(dataInside, text)
                        device_list.append(device_information)
                        dev = device_list[device_count]
                        dev.device_class_id = m.group(1)
                        dev.serial_number = m.group(2)
                        dev.device_description = m.group(4)
                        dev.friendly_name = m.group(7)
                        dev.manufacturer = m.group(9)
                        dev.type = 'SCSI'
                        dev.last_connected_time = result_data[1]
                    elif 'WPDBUSENUM\\' in text:
                        dataInside = r"WPDBUSENUM\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*)FriendlyName: \[REG_SZ\] (.*) Mfg: \[REG_SZ\] (.*) Service:"
                        m = re.search(dataInside, text)
                        device_list.append(device_information)
                        dev = device_list[device_count]
                        # For WPDBUSENUM the single capture serves as both class id and serial.
                        dev.device_class_id = m.group(1)
                        dev.serial_number = m.group(1)
                        dev.device_description = m.group(3)
                        # Keep a trailing backslash when the name itself contains one.
                        if '\\' in m.group(5):
                            dev.friendly_name = m.group(5) + "\\"
                        else:
                            dev.friendly_name = m.group(5)
                        dev.manufacturer = m.group(6)
                        dev.type = 'SWD'
                        dev.last_connected_time = result_data[1]
                    elif 'USB\\' in text:
                        friendly_group = None
                        if 'Address:' in text:
                            if 'FriendlyName' in text:
                                dataInside = r"USB\\(.*)\\(.*)\] Address(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*) FriendlyName: \[REG_SZ\] (.*) Hard(.*) Mfg: \[REG_SZ\] (.*) Service:"
                                friendly_group, mfg_group = 6, 8
                            else:
                                dataInside = r"USB\\(.*)\\(.*)\] Address(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*) Hard(.*) Mfg: \[REG_SZ\] (.*) Service:"
                                mfg_group = 7
                            m = re.search(dataInside, text)
                        else:
                            # No "Address:" value: three layout variants, tried in order.
                            # (The original wrapped this in a for-loop that always broke
                            # on the first iteration; straight-line code is equivalent.)
                            m = re.search(r"USB\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*) Mfg: \[REG_SZ\] (.*) P", text)
                            mfg_group = 6
                            if m is None:
                                m = re.search(r"USB\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*) FriendlyName: \[REG_SZ\] (.*) HardwareID: (.*) Mfg: \[REG_SZ\] (.*) S", text)
                                if m is not None:
                                    friendly_group, mfg_group = 6, 8
                                else:
                                    m = re.search(r"USB\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*) Mfg: \[REG_SZ\] (.*) S", text)
                                    mfg_group = 6
                        device_list.append(device_information)
                        dev = device_list[device_count]
                        dev.device_class_id = m.group(1)
                        dev.serial_number = m.group(2)
                        dev.device_description = m.group(4)
                        if friendly_group is not None:
                            dev.friendly_name = m.group(friendly_group)
                        dev.manufacturer = m.group(mfg_group)
                        dev.type = 'USB'
                        dev.last_connected_time = result_data[1]
                    elif 'USBSTOR\\' in text:
                        if 'Address:' in text:
                            dataInside = r"USBSTOR\\(.*)\\(.*)\] Address(.*) DeviceDesc: \[REG_SZ\] (.*) (.*) Driver: (.*)FriendlyName: \[REG_SZ\] (.*) Hard(.*) Mfg: \[REG_SZ\] (.*) Service:"
                        else:
                            dataInside = r"USBSTOR\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) (.*) Driver: (.*)FriendlyName: \[REG_SZ\] (.*) Hard(.*) Mfg: \[REG_SZ\] (.*) Service:"
                        m = re.search(dataInside, text)
                        device_list.append(device_information)
                        dev = device_list[device_count]
                        dev.device_class_id = m.group(1)
                        dev.serial_number = m.group(2)
                        dev.device_description = m.group(4)
                        dev.friendly_name = m.group(7)
                        dev.manufacturer = m.group(9)
                        dev.type = 'USBSTOR'
                        dev.last_connected_time = result_data[1]
                    elif 'STORAGE\\' in text:
                        if 'Address:' in text:
                            m = re.search(r"STORAGE\\(.*)\\(.*)\] Address(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*)Hard(.*) Mfg: \[REG_SZ\] (.*) Service:", text)
                        else:
                            # Two variants: with and without a trailing " Service:".
                            m = re.search(r"STORAGE\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*)Hard(.*) Mfg: \[REG_SZ\] (.*) Service:", text)
                            if m is None:
                                m = re.search(r"STORAGE\\(.*)\\(.*)\] Cap(.*) DeviceDesc: \[REG_SZ\] (.*) Driver: (.*)Hard(.*) Mfg: \[REG_SZ\] (.*)", text)
                        device_list.append(device_information)
                        dev = device_list[device_count]
                        # NOTE(review): class id uses group(2) (same as serial) here,
                        # unlike other branches' group(1) — kept as-is; confirm intended.
                        dev.device_class_id = m.group(2)
                        dev.serial_number = m.group(2)
                        dev.device_description = m.group(4)
                        dev.manufacturer = m.group(7)
                        dev.type = 'STORAGE'
                        dev.last_connected_time = result_data[1]
                    device_count = device_count + 1
                except Exception:
                    print("MAX-USB-error")

            # Second pass: attach the last assigned drive letter from SWD records.
            for result_data in result:
                try:
                    text = result_data[0].decode('utf-8')
                    if 'Windows Portable Devices\\Devices\\SWD' in text:
                        m = re.search(r"SWD#WPDBUSENUM#(.*)\] FriendlyName: \[REG_SZ\] (.*)", text)
                        for device in device_list:
                            if m.group(1).lower() == device.device_class_id:
                                device.last_assigned_drive_letter = m.group(2) + "\\"
                                break
                except Exception:
                    # Original handler crashed with NameError when no device was
                    # bound yet; fall back to a generic message in that case.
                    try:
                        print("MAX-USB" + device.device_class_id + "-error")
                    except Exception:
                        print("MAX-USB-error")

            # Third pass: prefer the volume label as friendly name when present.
            for result_data in result:
                try:
                    text = result_data[0].decode('utf-8')
                    if 'USB\\' in text and 'Label:' in text:
                        m = re.search(r"Enum\\USB\\(.*)\\(.*)\\Device Parameters(.*)Label: \[REG_SZ\] (.*) PortableDeviceType:", text)
                        for device in device_list:
                            if m.group(1) == device.device_class_id and m.group(2) == device.serial_number:
                                device.friendly_name = m.group(4)
                                break
                except Exception:
                    try:
                        print(device.device_class_id + "result_labe_error")
                    except Exception:
                        print("result_labe_error")

            # Attach the four 006x property-key timestamps to each device.
            _markers = (('0064]', 'driver_install_time'),
                        ('0065]', 'first_install_time'),
                        ('0066]', 'last_insertion_time'),
                        ('0067]', 'last_removal_time'))
            for device in device_list:
                for time_loop in result_final:
                    tl_text = time_loop[0].decode('utf-8')
                    for marker, attr in _markers:
                        if marker in tl_text:
                            if (device.device_class_id in tl_text
                                    and device.serial_number in tl_text
                                    and device.type in tl_text):
                                setattr(device, attr, time_loop[1])
                            break  # mirror the original elif chain: first marker only

            # One row per device.
            for device in device_list:
                insert_values = (
                    par[0], case_id, evd_id,
                    str(device.device_class_id), str(device.serial_number),
                    str(device.type), str(device.last_connected_time),
                    str(device.device_description), str(device.friendly_name),
                    str(device.manufacturer), str(device.last_assigned_drive_letter),
                    str(device.volume_GUID), str(device.volume_serial_number_decimal),
                    str(device.volume_serial_number_hex), str(device.associated_user_accounts),
                    str(device.first_connected_time), str(device.first_connected_since_reboot_time),
                    str(device.driver_install_time), str(device.first_install_time),
                    str(device.last_insertion_time), str(device.last_removal_time))
                self.InsertQuery(db, insert_values)
    db.close()
    now = datetime.now()
    print('[%s-%s-%s %s:%s:%s] USB DEVICES DONE' %
          (now.year, now.month, now.day, now.hour, now.minute, now.second))
def Parse(self, case_id, evd_id, par_list):
    """Extract installed-program entries from psort (log2timeline) output.

    Scans registry Uninstall-key records for DisplayName / DisplayVersion /
    Publisher / EstimatedSize / InstallDate / UninstallString values and
    inserts one row per installed program.

    Args:
        case_id: case identifier stored with every inserted row.
        evd_id: evidence identifier stored with every inserted row.
        par_list: iterable of partition rows; par[0] is the partition id.
    """
    # Make sure the destination table exists before inserting.
    if not self.CheckTable():
        self.CreateTable()
    db = carpe_db.Mariadb()
    db.open()
    for par in par_list:
        for art in self.ARTIFACTS:
            if art['Name'] != "InstalledPrograms":
                continue
            # All Uninstall-key records carrying a DisplayName value.
            query = r"SELECT description, datetime FROM log2timeline WHERE description like '%CurrentVersion#\\Uninstall#\%' escape '#' and description like '%DisplayName%'"
            result_query = db.execute_query_mul(query)
            programs_list = []
            program_count = 0
            if len(result_query) < 1:
                break
            for result_data in result_query:
                program_information = Program_Information()
                try:
                    text = result_data[0].decode('utf-8')  # decode once, reuse below
                    m = re.search(r"DisplayName: \[REG_SZ\] (.*) DisplayVersion:", text)
                    programs_list.append(program_information)
                    prog = programs_list[program_count]
                    prog.program_name = m.group(1)
                    if 'DisplayVersion' in text:
                        m = re.search(r"DisplayVersion: \[REG_SZ\] ([\d\W]*)", text)
                        prog.version = m.group(1)
                    if 'Publisher' in text:
                        # Anchor the Publisher value on whichever registry value
                        # follows it in this particular record. (The original's
                        # final "([\w]*)" fallback branch was unreachable.)
                        if 'Microsoft Corporation' in text:
                            prog.company = 'Microsoft Corporation'
                        elif 'Readme' in text:
                            m = re.search(r"Publisher: \[REG_SZ\] (.*) Readme:", text)
                            prog.company = m.group(1)
                        elif 'RegCompany' in text:
                            m = re.search(r"Publisher: \[REG_SZ\] (.*) RegCompany:", text)
                            prog.company = m.group(1)
                        elif 'URLInfoAbout' in text:
                            m = re.search(r"Publisher: \[REG_SZ\] (.*) URLInfoAbout", text)
                            prog.company = m.group(1)
                        else:
                            m = re.search(r"Publisher: \[REG_SZ\] (.*) UninstallString", text)
                            prog.company = m.group(1)
                    if 'EstimatedSize' in text:
                        m = re.search(r"EstimatedSize: \[REG_DWORD_LE\] ([\d]*)", text)
                        prog.install_size = m.group(1)
                    if 'InstallDate' in text:
                        m = re.search(r"InstallDate: \[REG_SZ\] ([\d]*)", text)
                        prog.created_date = m.group(1)
                    if 'DisplayIcon' in text:
                        # Derive the install location from the uninstaller path.
                        if ' UninstallString: [REG_SZ]' in text:
                            m = re.search(r" UninstallString: \[REG_SZ\] (.*)\\(.*).exe", text)
                        elif ' UninstallString: [REG_EXPAND_SZ]' in text:
                            m = re.search(r" UninstallString: \[REG_EXPAND_SZ\] (.*)\\(.*).exe", text)
                        else:
                            m = None
                        if m is not None:
                            location = m.group(1)
                            # Strip the leading quote from quoted paths.
                            if location[0] == '"':
                                prog.potential_location = location[1:]
                            else:
                                prog.potential_location = location
                    prog.key_last_updated_time = result_data[1]
                    program_count = program_count + 1
                except Exception:
                    print("MAX-REG_INSTALLED_PROGRAMS" +
                          result_data[0].decode('utf-8') + "error")
            # One row per program.
            for program in programs_list:
                insert_values = (par[0], case_id, evd_id,
                                 str(program.program_name), str(program.company),
                                 str(program.created_date),
                                 str(program.key_last_updated_time),
                                 str(program.install_size), str(program.version),
                                 str(program.potential_location))
                self.InsertQuery(db, insert_values)
    db.close()
    now = datetime.now()
    print('[%s-%s-%s %s:%s:%s] INSTALLED_PROGRAM DONE' %
          (now.year, now.month, now.day, now.hour, now.minute, now.second))
def Parse(self, case_id, evd_id, par_list):
    """Extract operating-system installation info from psort (log2timeline) output.

    Builds two OS_Information records — index 0 from SafeOS / Windows.old
    (pre-upgrade) hives, index 1 from the live hives — resolves each record's
    timezone display string, and inserts one row per record.

    Args:
        case_id: case identifier stored with every inserted row.
        evd_id: evidence identifier stored with every inserted row.
        par_list: iterable of partition rows; par[0] is the partition id.
    """
    # Make sure the destination table exists before inserting.
    if not self.CheckTable():
        self.CreateTable()
    db = carpe_db.Mariadb()
    db.open()
    for par in par_list:
        for art in self.ARTIFACTS:
            if art['Name'] != "os_info":
                continue
            OS_list = []
            OS_count = 0
            # Registry keys holding version, computer name, filesystem,
            # timezone, TCP/IP and shutdown information.
            query = r"SELECT description, filename, datetime FROM log2timeline WHERE description like '%#[HKEY_LOCAL_MACHINE#\\Software#\\Microsoft#\\Windows NT#\\CurrentVersion#]%' escape '#' or description like '%#[HKEY_LOCAL_MACHINE#\\System#\\ControlSet001#\\Control#\\ComputerName#\\ComputerName#]%' escape '#' or description like '%#[HKEY_LOCAL_MACHINE#\\System#\\ControlSet001#\\Control#\\FileSystem#]%' escape '#' or description like '%#[HKEY_LOCAL_MACHINE#\\System#\\ControlSet001#\\Control#\\TimeZoneInformation#]%' escape '#' or description like '%#[HKEY_LOCAL_MACHINE#\\System#\\ControlSet001#\\Services#\\Tcpip#\\Parameters#]%' escape '#' or description like '%#[HKEY_LOCAL_MACHINE#\\System#\\ControlSet001#\\Control#\\Windows#]%' escape '#'"
            result_query = db.execute_query_mul(query)
            os_information = OS_Information()
            try:
                OS_list.append(os_information)
                # Pass 1: hives saved aside by an upgrade (SafeOS / Windows.old).
                for result_data in result_query:
                    if 'SafeOS' in result_data[1] or 'Windows.old' in result_data[1]:
                        self._ParseOsRecord(OS_list[OS_count], result_data, True)
                OS_count = OS_count + 1
                OS_list.append(OS_Information())
                # Pass 2: the current installation.
                for result_data in result_query:
                    if 'SafeOS' not in result_data[1] and 'Windows.old' not in result_data[1]:
                        self._ParseOsRecord(OS_list[OS_count], result_data, False)
            except Exception:
                print("MAX-OS_INFO" + result_data[0].decode('utf-8') + "error")
            try:
                # Resolve the human-readable "(UTC...)" display string for each
                # recorded TimeZoneKeyName (query1: legacy hive, query2: live hive).
                query1 = (r"SELECT description, filename FROM log2timeline WHERE description like '%#Windows NT#\\CurrentVersion#\\Time Zones#\\"
                          + OS_list[0].display_timezone_name
                          + "%' escape '#' and (filename like '%SafeOS%' or filename like '%Windows.old%')")
                query2 = (r"SELECT description, filename FROM log2timeline WHERE description like '%#Windows NT#\\CurrentVersion#\\Time Zones#\\"
                          + OS_list[1].display_timezone_name
                          + "%' escape '#' and (filename not like '%SafeOS%' and filename not like '%Windows.old%')")
                result_data = db.execute_query_mul(query1)
                result_data2 = db.execute_query_mul(query2)
                dataInside = r" Display: \[REG_SZ\] (.*) Dlt:"
                m = re.search(dataInside, result_data[0][0].decode('utf-8'))
                OS_list[0].timezone_utc = m.group(1)
                m = re.search(dataInside, result_data2[0][0].decode('utf-8'))
                OS_list[1].timezone_utc = m.group(1)
            except Exception:
                print("MAX-OSINFO-Timezone_error")
            # One row per OS record. ("os" loop variable renamed: shadowed the os module.)
            for os_record in OS_list:
                # NOTE(review): reads os_record.organization while the parser sets
                # .Organization — relies on a class-level default; confirm intended.
                insert_values = (par[0], case_id, evd_id,
                                 str(os_record.operating_system), str(os_record.version_number),
                                 str(os_record.install_time), str(os_record.product_key),
                                 str(os_record.owner), str(os_record.display_computer_name),
                                 str(os_record.computer_name), str(os_record.dhcp_dns_server),
                                 str(os_record.operating_system_version), str(os_record.build_number),
                                 str(os_record.product_id), str(os_record.last_service_pack),
                                 str(os_record.organization), str(os_record.last_shutdown_time),
                                 str(os_record.system_root), str(os_record.path),
                                 str(os_record.last_access_time_flag), str(os_record.timezone_utc),
                                 str(os_record.display_timezone_name))
                self.InsertQuery(db, insert_values)
    db.close()
    now = datetime.now()
    print('[%s-%s-%s %s:%s:%s] OS INFO DONE' %
          (now.year, now.month, now.day, now.hour, now.minute, now.second))

def _ParseOsRecord(self, os_info, result_data, legacy):
    """Populate *os_info* from one registry record (helper for Parse).

    Args:
        os_info: OS_Information instance to fill in.
        result_data: (description, filename, datetime) row from log2timeline.
        legacy: True when parsing SafeOS / Windows.old (pre-upgrade) hives,
            whose value layouts differ slightly from the live hives.
    """
    text = result_data[0].decode('utf-8')
    if 'CurrentVersion]' in text:
        if 'ProductName' in text:
            if 'RegisteredOwner' in text:
                m = re.search(r"ProductName: \[REG_SZ\] (.*) RegisteredOrganization: (.*) ReleaseId: \[REG_SZ\] (.*) SoftwareType:", text)
                os_info.operating_system = m.group(1) + '(' + m.group(3) + ')'
            else:
                m = re.search(r"ProductName: \[REG_SZ\] (.*) ReleaseId: \[REG_SZ\] (.*) SoftwareType:", text)
                os_info.operating_system = m.group(1) + '(' + m.group(2) + ')'
        if 'CurrentVersion' in text:  # always true inside this branch; kept from original
            m = re.search(r" CurrentVersion: \[REG_SZ\] ([\d\W]*)", text)
            os_info.version_number = m.group(1)
        if 'InstallTime' in text:
            m = re.search(r"InstallTime: \[REG_QWORD\] ([\d]*)", text)
            # FILETIME: 100-ns ticks since 1601-01-01 -> datetime.
            os_info.install_time = datetime(1601, 1, 1) + timedelta(microseconds=int(m.group(1)) / 10)
        if 'RegisteredOwner' in text:
            m = re.search(r"RegisteredOwner: \[REG_SZ\] (.*) ReleaseId:", text)
            os_info.owner = m.group(1)
        if 'EditionID' in text:
            # The value following EditionID differs between hive generations.
            if legacy:
                if 'InstallTime' in text:
                    m = re.search(r"EditionID: \[REG_SZ\] (.*) InstallTime:", text)
                else:
                    m = re.search(r"EditionID: \[REG_SZ\] (.*) InstallationType:", text)
            else:
                if 'InstallTime' in text:
                    if 'EditionSubManufacturer' in text:
                        m = re.search(r" EditionID: \[REG_SZ\] (.*) EditionSubManufacturer:", text)
                    else:
                        m = re.search(r" EditionID: \[REG_SZ\] (.*) InstallTime:", text)
                else:
                    m = re.search(r" EditionID: \[REG_SZ\] (.*) InstallationType:", text)
            os_info.operating_system_version = m.group(1)
        if 'CurrentBuildNumber' in text:
            m = re.search(r"CurrentBuildNumber: \[REG_SZ\] ([\d]*)", text)
            os_info.build_number = m.group(1)
        if 'ProductId' in text:
            m = re.search(r"ProductId: \[REG_SZ\] (.*) ProductName:", text)
            os_info.product_id = m.group(1)
        if 'RegisteredOrganizations' in text:
            m = re.search(r"RegisteredOrganizations: \[REG_SZ\] (.*) RegisteredOwner:", text)
            os_info.Organization = m.group(1)
        if 'SystemRoot' in text:
            m = re.search(r"SystemRoot: \[REG_SZ\] (.*) UBR:", text)
            os_info.system_root = m.group(1)
        if 'PathName' in text:
            if legacy and 'ProductId' not in text:
                m = re.search(r"PathName: \[REG_SZ\] (.*) ProductName:", text)
            else:
                m = re.search(r"PathName: \[REG_SZ\] (.*) ProductId:", text)
            os_info.path = m.group(1)
    if 'ComputerName\ComputerName]' in text:
        m = re.search(r"ComputerName: \[REG_SZ\] (.*)", text)
        os_info.computer_name = m.group(1)
    if 'FileSystem]' in text:
        if legacy:
            m = re.search(r"NtfsDisableLastAccessUpdate: \[REG_DWORD_LE\] ([\d])", text)
            # 1 = last-access updates disabled.
            if m.group(1) == '1':
                os_info.last_access_time_flag = 'No'
            else:
                os_info.last_access_time_flag = 'Yes'
        else:
            m = re.search(r"NtfsDisableLastAccessUpdate: \[REG_DWORD_LE\] (.*) NtfsDisableLfsDowngrade:", text)
            if m.group(1) == '1':
                os_info.last_access_time_flag = 'No'
            elif m.group(1) == '0':
                os_info.last_access_time_flag = 'Yes'
            else:
                os_info.last_access_time_flag = m.group(1) + '(not parsed)'
    if 'TimeZoneInformation]' in text:
        m = re.search(r"TimeZoneKeyName: (.*)", text)
        os_info.display_timezone_name = m.group(1)
    if 'Tcpip\Parameters]' in text:
        # Value-name casing differs per hive generation; original checked
        # 'Hostname' first for legacy hives and 'HostName' first otherwise.
        if legacy:
            hostname_keys = ('Hostname', 'HostName')
        else:
            hostname_keys = ('HostName', 'Hostname')
        for key in hostname_keys:
            if key in text:
                m = re.search(key + r": \[REG_SZ\] (.*) ICSDomain:", text)
                os_info.display_computer_name = m.group(1)
                m = re.search(r"DhcpNameServer: \[REG_SZ\] (.*) Domain:", text)
                os_info.dhcp_dns_server = m.group(1)
                break
    if 'Control\Windows]' in text:
        # The key's last-write time corresponds to the last shutdown.
        os_info.last_shutdown_time = str(result_data[2])