def get_HideX(files_found, report_folder, seeker, wrap_text):
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('hidex.db'):
            db = open_sqlite_db_readonly(file_found)
            cursor = db.cursor()
            cursor.execute('''
            SELECT
                id,
                packageName,
                CASE isActive
                    WHEN 0 THEN ''
                    WHEN 1 THEN 'Yes'
                END
            FROM p_lock_app
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            if usageentries > 0:
                report = ArtifactHtmlReport('HideX - Locked Apps')
                report.start_artifact_report(report_folder, 'HideX - Locked Apps')
                report.add_script()
                data_headers = ('ID', 'Package Name', 'Is Active')
                data_list = []
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2]))

                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()

                tsvname = f'HideX'
                tsv(report_folder, data_headers, data_list, tsvname)
            else:
                logfunc('No HideX data available')

            db.close()
    return

def get_healthWeight(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH') AS "DATE",
        QUANTITY AS "WEIGHT (IN KG)",
        QUANTITY * 2.20462 AS "WEIGHT (IN LBS)",
        SAMPLES.DATA_ID AS "SAMPLES TABLE ID"
    FROM SAMPLES
    LEFT OUTER JOIN QUANTITY_SAMPLES ON SAMPLES.DATA_ID = QUANTITY_SAMPLES.DATA_ID
    WHERE SAMPLES.DATA_TYPE = 3 AND "DATE" IS NOT NULL
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        report = ArtifactHtmlReport('Health Weight')
        report.start_artifact_report(report_folder, 'Weight')
        report.add_script()
        data_headers = ('Date', 'Weight in KG', 'Weight in LBS', 'Samples Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Health Weight'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

    db.close()
    return

def get_healthFlights(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH') AS "START DATE",
        DATETIME(SAMPLES.END_DATE + 978307200, 'UNIXEPOCH') AS "END DATE",
        QUANTITY AS "FLIGHTS CLIMBED",
        (SAMPLES.END_DATE - SAMPLES.START_DATE) AS "TIME IN SECONDS",
        SAMPLES.DATA_ID AS "SAMPLES TABLE ID"
    FROM SAMPLES
    LEFT OUTER JOIN QUANTITY_SAMPLES ON SAMPLES.DATA_ID = QUANTITY_SAMPLES.DATA_ID
    WHERE SAMPLES.DATA_TYPE = 12
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries == 0:
        logfunc('No data available in table')
    else:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        description = ''
        report = ArtifactHtmlReport('Health Flights Climbed')
        report.start_artifact_report(report_folder, 'Flights Climbed', description)
        report.add_script()
        data_headers = ('Start Date', 'End Date', 'Flights Climbed', 'Time in Seconds', 'Samples Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Health Flights'
        tsv(report_folder, data_headers, data_list, tsvname)

def get_smyfilesStored(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
        storage,
        path,
        size,
        datetime(date / 1000, "unixepoch"),
        datetime(latest / 1000, "unixepoch")
    from FileCache
    where path is not NULL
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('My Files DB - Stored Files')
        report.start_artifact_report(report_folder, 'My Files DB - Stored Files')
        report.add_script()
        data_headers = ('Storage', 'Path', 'Size', 'Timestamp', 'Latest')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'my files db - stored files'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No My Files DB Stored data available')

    db.close()
    return

def get_sbrowserWebsearch(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        url,
        title,
        visit_count,
        datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    FROM urls
    WHERE url LIKE '%search?q=%'
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Web Search Terms')
        report.start_artifact_report(report_folder, 'Browser Web Search Terms')
        report.add_script()
        data_headers = ('Search Term', 'URL', 'Title', 'Visit Count', 'Last Visit Time')
        data_list = []
        for row in all_rows:
            # Pull the search term out of the query string and undo URL encoding of spaces
            search = row[0].split('search?q=')[1].split('&')[0]
            search = search.replace('+', ' ')
            search = search.replace('%20', ' ')
            data_list.append((search, textwrap.fill(row[0], width=100), row[1], row[2], row[3]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No web search terms data available')

    db.close()
    return

def get_discordAcct(files_found, report_folder, seeker):
    searchlist = []
    for file_found in files_found:
        file_found = str(file_found)

        for s in strings(file_found):
            searchlist.append(str(s))

        counter = 0
        data_list = []
        for x in searchlist:
            counter += 1
            # The cached value sits on the string immediately after the key string,
            # wrapped in double quotes.
            if 'user_id_cache' in x:
                try:
                    wf = searchlist[counter].split('"')
                    data_list.append(('USER_ID_CACHE', wf[1]))
                except IndexError:
                    pass
            if 'email_cache' in x:
                try:
                    wfa = searchlist[counter].split('"')
                    data_list.append(('EMAIL_CACHE', wfa[1]))
                except IndexError:
                    pass

        if len(data_list) > 0:
            report = ArtifactHtmlReport('Discord Account')
            report.start_artifact_report(report_folder, 'Discord Account')
            report.add_script()
            data_headers = ('Key', 'Value')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Discord Account'
            tsv(report_folder, data_headers, data_list, tsvname)

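# The `strings` helper used above is not defined in this excerpt; it is assumed to be
# a shared utility that yields runs of printable characters from a binary file, much
# like the Unix `strings` tool. A minimal sketch under that assumption (illustrative
# only; the project's real helper may differ):
import re

def _strings_sketch(path, min_len=4):
    # Read the file as raw bytes and yield printable-ASCII runs of at least
    # `min_len` characters, decoded to str.
    with open(path, 'rb') as f:
        data = f.read()
    for match in re.finditer(rb'[ -~]{%d,}' % min_len, data):
        yield match.group().decode('ascii')
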
def read_sms_messages(db, report_folder, file_found, seeker, wrap_text):
    cursor = db.cursor()
    cursor.execute(sms_query)
    all_rows = cursor.fetchall()
    entries = len(all_rows)
    if entries > 0:
        report = ArtifactHtmlReport('SMS messages')
        report.start_artifact_report(report_folder, 'SMS messages')
        report.add_script()
        data_headers = ('Date', 'MSG ID', 'Thread ID', 'Address', 'Contact ID', 'Date sent',
                        'Read', 'Type', 'Body', 'Service Center', 'Error code')
        data_list = []
        for row in all_rows:
            if wrap_text:
                data_list.append((row['date'], row['msg_id'], row['thread_id'], row['address'],
                                  row['person'], row['date_sent'], row['read'], row['type'],
                                  row['body'].replace("\n", ""), row['service_center'], row['error_code']))
            else:
                data_list.append((row['date'], row['msg_id'], row['thread_id'], row['address'],
                                  row['person'], row['date_sent'], row['read'], row['type'],
                                  row['body'], row['service_center'], row['error_code']))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'sms messages'
        tsv(report_folder, data_headers, data_list, tsvname, file_found.replace(seeker.directory, ''))

        tlactivity = f'SMS Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No SMS messages found!')
        return False

    return True

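# `sms_query` is a module-level constant that is not shown in this excerpt. Because the
# rows above are addressed by column name, the connection is assumed to use
# `db.row_factory = sqlite3.Row`. A hypothetical query against Android's mmssms.db
# `sms` table that would satisfy the column names used above (the project's actual
# query may differ):
_example_sms_query = '''
    SELECT
        datetime(date / 1000, 'unixepoch') AS date,
        _id AS msg_id,
        thread_id,
        address,
        person,
        datetime(date_sent / 1000, 'unixepoch') AS date_sent,
        read,
        type,
        body,
        service_center,
        error_code
    FROM sms
'''
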
def get_dataUsageProcessB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
        datetime(zprocess.ztimestamp + 978307200, 'unixepoch'),
        datetime(zprocess.zfirsttimestamp + 978307200, 'unixepoch'),
        zprocess.zprocname,
        zprocess.zbundlename
    from zprocess
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        report = ArtifactHtmlReport('Data Usage')
        report.start_artifact_report(report_folder, 'Data Usage Process')
        report.add_script()
        data_headers = ('Timestamp', 'Process First Timestamp', 'Process Name', 'Bundle ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Data Usage Process'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Data Usage Process'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Data Usage available')

    db.close()
    return

def get_googlePlaySearches(files_found, report_folder, seeker, wrap_text):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime(date / 1000, "unixepoch"),
        display1,
        query
    FROM suggestions
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Google Play Searches')
        report.start_artifact_report(report_folder, 'Google Play Searches')
        report.add_script()
        data_headers = ('Timestamp', 'Display', 'Query')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'google play searches'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Google Play Searches'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Google Play Searches data available')

    db.close()
    return

def get_queryPredictions(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
        datetime(creationTimestamp, "UNIXEPOCH") as START,
        content,
        isSent,
        conversationId,
        id,
        uuid
    from messages
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report = ArtifactHtmlReport('Query Predictions')
        report.start_artifact_report(report_folder, 'Query Predictions')
        report.add_script()
        data_headers = ('Timestamp', 'Content', 'Is Sent?', 'Conversation ID', 'ID', 'UUID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Query Predictions'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Query Predictions'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return

def get_fitbitHeart(files_found, report_folder, seeker, wrap_text):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime("DATE_TIME" / 1000, 'unixepoch'),
        AVERAGE_HEART_RATE,
        RESTING_HEART_RATE
    FROM HEART_RATE_DAILY_SUMMARY
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Fitbit Heart Rate Summary')
        report.start_artifact_report(report_folder, 'Fitbit Heart Rate Summary')
        report.add_script()
        data_headers = ('Timestamp', 'Avg. Heart Rate', 'Resting Heart Rate')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Fitbit Heart Rate Summary'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Fitbit Heart Rate Summary'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Fitbit Heart Rate Summary data available')

    db.close()

def get_appleMapsSearchHistory(files_found, report_folder, seeker):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, "rb") as plist_file:
            plist_content = plistlib.load(plist_file)

        for entry in plist_content['MSPHistory']['records']:
            search_history = plist_content['MSPHistory']['records'][entry]
            content = search_history.get('contents').decode('UTF-8', 'ignore')
            timestamp = search_history.get('modificationDate')
            formatted_timestamp = timestamp.strftime('%Y-%m-%d %H:%M:%S')
            if len(content) < 300:
                id_search_entry = content.split('\n')
                search_entry = id_search_entry[1].split('"')
                search_entry_split = str(search_entry[0]).split('\x12')
                search_entry_filtered = list(filter(None, search_entry_split))
                data_list.append((formatted_timestamp, ', '.join(search_entry_filtered)))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Apple Maps Search History')
        report.start_artifact_report(report_folder, 'Apple Maps Search History')
        report.add_script()
        data_headers = ("Timestamp", "Search Entry")
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Apple Maps Search History'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Apple Maps Search History'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for Apple Maps Search History')

def get_geodApplications(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT count_type, app_id, createtime
    FROM mkcount
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[2], row[0], row[1]))

        description = ''
        report = ArtifactHtmlReport('Geolocation')
        report.start_artifact_report(report_folder, 'Applications', description)
        report.add_script()
        data_headers = ("Creation Time", "Count ID", "Application")
        report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
        report.end_artifact_report()

        tsvname = 'Geolocation Applications'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Geolocation Applications'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for Geolocation Applications')

    db.close()
    return

def get_smyfilesRecents(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
        name,
        size,
        datetime(date / 1000, "unixepoch"),
        _data,
        ext,
        _source,
        _description,
        datetime(recent_date / 1000, "unixepoch")
    from recent_files
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('My Files DB - Recent Files')
        report.start_artifact_report(report_folder, 'My Files DB - Recent Files')
        report.add_script()
        data_headers = ('Name', 'Size', 'Timestamp', 'Data', 'Ext.', 'Source', 'Description', 'Recent Timestamp')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No My Files DB Recents data available')

    db.close()
    return

def get_BashHistory(files_found, report_folder, seeker, wrap_text):
    data_list = []
    file_found = str(files_found[0])
    counter = 1
    with codecs.open(file_found, 'r', 'utf-8-sig') as csvfile:
        for row in csvfile:
            data_list.append((counter, row))
            counter += 1

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Bash History')
        report.start_artifact_report(report_folder, f'Bash History')
        report.add_script()
        data_headers = ('Order', 'Command')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Bash History'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc(f'No Bash History file available')

def get_coreDuetPlugin(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT
        DATETIME(ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "TIMESTAMP",
        TIME(ZCREATIONDATE - ZLOCALTIME, 'UNIXEPOCH') AS "TIME ZONE",
        CASE ZCABLESTATE
            WHEN "0" THEN "UNPLUGGED"
            WHEN "1" THEN "PLUGGED IN"
        END "CABLE STATE"
    FROM ZCDDMPLUGINEVENT
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            # The query returns three columns: timestamp, time zone offset and cable state
            data_list.append((row[0], row[1], row[2]))

        description = ''
        report = ArtifactHtmlReport('CoreDuet Plugged In')
        report.start_artifact_report(report_folder, 'Plugged In', description)
        report.add_script()
        data_headers = ('Timestamp', 'Time Zone', 'Cable State')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'CoreDuet Plugged In'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'CoreDuet Plugged In'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

def get_chromeOfflinePages(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime(creation_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch") as creation_time,
        datetime(last_access_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch") as last_access_time,
        online_url,
        file_path,
        title,
        access_count,
        file_size
    from offlinepages_v1
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Offline Pages')
        report.start_artifact_report(report_folder, 'Offline Pages')
        report.add_script()
        data_headers = ('Creation Time', 'Last Access Time', 'Online URL', 'File Path', 'Title', 'Access Count', 'File Size')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], textwrap.fill(row[2], width=75), row[3], row[4], row[5], row[6]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Chrome Offline Pages data available')

    db.close()
    return

def get_coreDuetPlugin(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute("""
    select
        datetime(zcreationdate + 978307200, 'unixepoch'),
        time(zcreationdate - zlocaltime, 'unixepoch'),
        case zcablestate
            when '0' then 'unplugged'
            when '1' then 'plugged in'
        end
    from zcddmpluginevent
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            # The query returns three columns: timestamp, time zone offset and cable state
            data_list.append((row[0], row[1], row[2]))

        description = ''
        report = ArtifactHtmlReport('CoreDuet Plugged In')
        report.start_artifact_report(report_folder, 'Plugged In', description)
        report.add_script()
        data_headers = ('Timestamp', 'Time Zone', 'Cable State')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'CoreDuet Plugged In'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'CoreDuet Plugged In'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

def get_bluetoothOther(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT Name, Address, LastSeenTime, Uuid
    FROM OtherDevices
    ORDER BY Name DESC
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        description = ''
        report = ArtifactHtmlReport('Bluetooth Other')
        report.start_artifact_report(report_folder, 'Other', description)
        report.add_script()
        data_headers = ('Name', 'Address', 'Last Seen Time', 'UUID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Bluetooth Other'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available for Bluetooth Other')

    db.close()
    return

def get_dataUsageProcessA(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        DATETIME(ZPROCESS.ZTIMESTAMP + 978307200, 'UNIXEPOCH') AS "TIMESTAMP",
        DATETIME(ZPROCESS.ZFIRSTTIMESTAMP + 978307200, 'UNIXEPOCH') AS "PROCESS FIRST TIMESTAMP",
        ZPROCESS.ZPROCNAME AS "PROCESS NAME",
        ZPROCESS.ZBUNDLENAME AS "BUNDLE ID",
        ZPROCESS.Z_PK AS "ZPROCESS TABLE ID"
    FROM ZPROCESS
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report = ArtifactHtmlReport('Data Usage')
        report.start_artifact_report(report_folder, 'Data Usage Process')
        report.add_script()
        data_headers = ('Timestamp', 'Process First Timestamp', 'Process Name', 'Bundle ID', 'Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Data Usage Process'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Data Usage Process'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Data Usage available')

    db.close()
    return

def get_iconsScreen(files_found, report_folder, seeker):
    data_list = []
    data_pre_list = []
    file_found = str(files_found[0])

    with open(file_found, "rb") as fp:
        plist = plistlib.load(fp)

    for key, val in plist.items():
        if key == "buttonBar":
            bbar = val
        elif key == "iconLists":
            icon = val

    # Build one HTML table per home screen, four icons per row
    for x in range(0, len(icon)):
        page = icon[x]
        htmlstring = (f"<table><tr>")
        htmlstring = htmlstring + (f'<td colspan="4"> Icons screen #{x}</td>')
        for y in range(0, len(page)):
            rows = page[y]
            if (y == 0) or (y % 4 == 0):
                htmlstring = htmlstring + ("</tr><tr>")
            htmlstring = htmlstring + (f"<td width = 25%>{rows}</td>")
        htmlstring = htmlstring + ("</tr></table>")
        data_list.append((htmlstring,))

    # Build one table for the dock (bottom bar)
    htmlstring = ''
    htmlstring = (f'<table><tr> <td colspan="4"> Icons bottom bar</td></tr><tr>')
    for x in range(0, len(bbar)):
        htmlstring = htmlstring + (f"<td width = 25%>{bbar[x]}</td>")
    htmlstring = htmlstring + ("</tr></table>")
    data_list.append((htmlstring,))

    logfunc("Screens: " + str(len(icon)))

    report = ArtifactHtmlReport(f'Apps per screen')
    report.start_artifact_report(report_folder, f'Apps per screen')
    report.add_script()
    data_headers = ((f'Apps per Screens',))
    report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
    report.end_artifact_report()

def get_sbrowserBookmarks(files_found, report_folder, seeker):
    file_found = str(files_found[0])

    with open(file_found, "r") as f:
        dataa = json.load(f)

    report = ArtifactHtmlReport('Browser Bookmarks')
    report.start_artifact_report(report_folder, 'Browser Bookmarks')
    report.add_script()
    data_headers = ('URL', 'Added Date', 'Name', 'Parent', 'Type')
    data_list = []

    for x, y in dataa.items():
        flag = 0
        if isinstance(y, dict):
            for key, value in y.items():
                if isinstance(value, dict):
                    for keyb, valueb in value.items():
                        if keyb == 'children':
                            if len(valueb) > 0:
                                url = valueb[0]['url']
                                dateadd = valueb[0]['date_added']
                                # date_added is stored in WebKit/Chrome format: microseconds since 1601-01-01
                                dateaddconv = datetime.datetime(1601, 1, 1) + datetime.timedelta(microseconds=int(dateadd))
                                name = valueb[0]['name']
                                typed = valueb[0]['type']
                                flag = 1
                        if keyb == 'name' and flag == 1:
                            flag = 0
                            parent = valueb
                            data_list.append((url, dateaddconv, name, parent, typed))

    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()
    return

def get_accs(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT
        DATETIME(ZDATE + 978307200, 'UNIXEPOCH', 'UTC') AS 'ZDATE TIMESTAMP',
        ZACCOUNTTYPEDESCRIPTION,
        ZUSERNAME,
        ZACCOUNTDESCRIPTION,
        ZACCOUNT.ZIDENTIFIER,
        ZACCOUNT.ZOWNINGBUNDLEID
    FROM ZACCOUNT
    JOIN ZACCOUNTTYPE ON ZACCOUNTTYPE.Z_PK = ZACCOUNT.ZACCOUNTTYPE
    ORDER BY ZACCOUNTTYPEDESCRIPTION
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report = ArtifactHtmlReport('Account Data')
        report.start_artifact_report(report_folder, 'Account Data')
        report.add_script()
        data_headers = ('Timestamp', 'Account Desc.', 'Username', 'Description', 'Identifier', 'Bundle ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Account Data'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Account Data'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc("No Account Data available")

def get_geodPDPlaceCache(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT
        requestkey,
        pdplacelookup.pdplacehash,
        datetime('2001-01-01', "lastaccesstime" || ' seconds') as lastaccesstime,
        datetime('2001-01-01', "expiretime" || ' seconds') as expiretime,
        pdplace
    FROM pdplacelookup
    INNER JOIN pdplaces on pdplacelookup.pdplacehash = pdplaces.pdplacehash
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            # Pull the printable strings out of the pdplace blob for display
            pd_place = ''.join(f'{s}<br>' for s in set(strings(row[4])))
            data_list.append((row[0], row[1], row[2], row[3], pd_place))

        description = ''
        report = ArtifactHtmlReport('Geolocation')
        report.start_artifact_report(report_folder, 'PD Place Cache', description)
        report.add_script()
        data_headers = ("Request Key", "PD Place Hash", "Last Access Time", "Expire Time", "PD Place")
        report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
        report.end_artifact_report()

        tsvname = 'Geolocation PD Place Caches'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available for Geolocation PD Place Caches')

    db.close()
    return

def get_coreDuetLock(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT
        DATETIME(ZCREATIONDATE + 978307200, "UNIXEPOCH") AS "CREATE TIME",
        TIME(ZLOCALTIME, "UNIXEPOCH") AS "LOCAL DEVICE TIME",
        TIME(ZCREATIONDATE - ZLOCALTIME, "UNIXEPOCH") AS "TIME ZONE",
        CASE ZLOCKSTATE
            WHEN "0" THEN "UNLOCKED"
            WHEN "1" THEN "LOCKED"
        END "LOCK STATE"
    FROM ZCDDMSCREENLOCKEVENT
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        description = ''
        report = ArtifactHtmlReport('CoreDuet Lock State')
        report.start_artifact_report(report_folder, 'Lock State', description)
        report.add_script()
        data_headers = ('Create Time', 'Local Device Time', 'Time Zone', 'Lock State')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'CoreDuet Lock State'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

def get_aggDictScalars(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
    SELECT
        DATE(DAYSSINCE1970 * 86400, 'unixepoch') AS DAY,
        KEY AS "KEY",
        VALUE AS "VALUE"
    FROM SCALARS
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = ''
        report = ArtifactHtmlReport('Aggregate Dictionary Scalars')
        report.start_artifact_report(report_folder, 'Scalars', description)
        report.add_script()
        data_headers = ('Day', 'Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Agg Dict Scalars'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Aggregate Dictionary Scalars'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc("No Aggregate Dictionary Scalars data available")

def get_aggDictpasscodetype(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute("""
    select
        date(dayssince1970 * 86400, 'unixepoch'),
        key,
        case
            when value = -1 then '6-digit'
            when value = 0 then 'no passcode'
            when value = 1 then '4-digit'
            when value = 2 then 'custom alphanumeric'
            when value = 3 then 'custom numeric'
            else 'n/a'
        end as "value"
    from scalars
    where key like 'com.apple.passcode.passcodetype%'
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2]))

    description = ''
    report = ArtifactHtmlReport('Aggregate Dictionary Passcode Type')
    report.start_artifact_report(report_folder, 'Passcode Type', description)
    report.add_script()
    data_headers = ('Day', 'Key', 'Value')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Agg Dict Dictionary Passcode Type'
    tsv(report_folder, data_headers, data_list, tsvname)

def get_installedappsLibrary(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        account,
        doc_id,
        CASE
            WHEN purchase_time = 0 THEN ''
            WHEN purchase_time > 0 THEN datetime(purchase_time / 1000, "unixepoch")
        END AS pt
    FROM ownership
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Installed Apps (Library)')
        report.start_artifact_report(report_folder, 'Installed Apps (Library)')
        report.add_script()
        data_headers = ('Account', 'Doc ID', 'Purchase Time')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'installed apps library'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Installed Apps (Library) data available')

    db.close()
    return

def get_appLockerfishingnetpat(files_found, report_folder, seeker, wrap_text):
    standardKey = '526e7934384e693861506a59436e5549'
    standardIV = '526e7934384e693861506a59436e5549'

    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        tree = ET.parse(file_found)
        root = tree.getroot()
        encryptedPattern = root.findall('./string[@name="85B064D26810275C89F1F2CC15E20B442E98874398F16F6717BBD5D34920E3F8"]')[0].text

        cipher = AES.new(bytes.fromhex(standardKey), AES.MODE_CBC, bytes.fromhex(standardIV))
        decryptedPattern = unpad(cipher.decrypt(bytes.fromhex(encryptedPattern)), AES.block_size)
        data_list.append((encryptedPattern, decryptedPattern))

    if data_list:
        report = ArtifactHtmlReport('Calculator Locker Pattern')
        report.start_artifact_report(report_folder, 'Calculator Locker Pattern')
        report.add_script()
        data_headers = ('Encrypted Pattern', 'Decrypted Pattern')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Media'])
        report.end_artifact_report()

        tsvname = f'Calculator Locker Pattern data'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Calculator Locker Pattern data available')

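# The pattern decryption above relies on XML parsing and crypto helpers whose imports
# are not shown in this excerpt. Assuming PyCryptodome is the crypto library in use,
# the module-level imports would look like this:
import xml.etree.ElementTree as ET        # parse the XML shared-preferences file
from Crypto.Cipher import AES             # AES-CBC decryption of the stored pattern
from Crypto.Util.Padding import unpad     # strip PKCS#7 padding after decryption
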
def get_smembersEvents(files_found, report_folder, seeker, wrap_text):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
        datetime(created_at / 1000, "unixepoch"),
        type,
        value,
        in_snapshot
    FROM device_events
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Samsung Members - Events')
        report.start_artifact_report(report_folder, 'Samsung Members - Events')
        report.add_script()
        data_headers = ('Created At', 'Type', 'Value', 'Snapshot?')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'samsung members - events'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Samsung Members - Events'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Samsung Members - Events data available')

    db.close()
    return