def ReadNotesV2_V4_V6(db, notes, version, source, user):
    '''Reads NotesVx.storedata, where x= 2,4,6,7'''
    try:
        # One row per (note, attachment) pair: the note body (HTML) and any
        # attachment are LEFT JOINed in. The owning account hangs off the
        # folder's parent folder, hence the folder_parent_id subquery.
        query = "SELECT n.Z_PK as note_id, n.ZDATECREATED as created, n.ZDATEEDITED as edited, n.ZTITLE as title, "\
            " (SELECT ZNAME from ZFOLDER where n.ZFOLDER=ZFOLDER.Z_PK) as folder, "\
            " (SELECT zf2.ZACCOUNT from ZFOLDER as zf1 LEFT JOIN ZFOLDER as zf2 on (zf1.ZPARENT=zf2.Z_PK) where n.ZFOLDER=zf1.Z_PK) as folder_parent_id, "\
            " ac.ZEMAILADDRESS as email, ac.ZACCOUNTDESCRIPTION as acc_desc, ac.ZUSERNAME as username, b.ZHTMLSTRING as data, "\
            " att.ZCONTENTID as att_id, att.ZFILEURL as file_url "\
            " FROM ZNOTE as n "\
            " LEFT JOIN ZNOTEBODY as b ON b.ZNOTE = n.Z_PK "\
            " LEFT JOIN ZATTACHMENT as att ON att.ZNOTE = n.Z_PK "\
            " LEFT JOIN ZACCOUNT as ac ON ac.Z_PK = folder_parent_id"
        db.row_factory = sqlite3.Row
        cursor = db.execute(query)
        for row in cursor:
            try:
                att_path = ''
                # ZFILEURL is a serialized plist; extract the real path from it
                if row['file_url'] != None:
                    att_path = ReadAttPathFromPlist(row['file_url'])
                note = Note(row['note_id'], row['folder'], row['title'], '', row['data'],
                            row['att_id'], att_path, row['acc_desc'], row['email'], row['username'],
                            CommonFunctions.ReadMacAbsoluteTime(row['created']),
                            CommonFunctions.ReadMacAbsoluteTime(row['edited']),
                            version, 0, '', user, source)
                notes.append(note)
            except (sqlite3.Error, KeyError):
                # Per-row failure: log and continue with remaining rows
                log.exception('Error fetching row data')
    except sqlite3.Error:
        log.exception('Query execution failed. Query was: ' + query)
def parse_hsts_plist(plist, cookies, user_name, plist_path):
    '''Parse plist and add items to cookies list.

    Handles both hsts layouts: version 3+ stores a dict per site
    ('Create Time' / 'Expiry'), version 1 stores a bare float expiry.
    Metadata entries (e.g. HSTS_Content_Version, schema) are neither
    dicts nor floats and are skipped — previously they fell through and
    a Cookie was appended using undefined or stale c_time/e_time.
    '''
    hsts_store = plist.get('com.apple.CFNetwork.defaultStorageSession', None)
    if hsts_store:
        for site, items in hsts_store.items():
            if isinstance(items, dict): # Newer hsts (version 3 or higher seen)
                c_time = CommonFunctions.ReadMacAbsoluteTime(items.get('Create Time', None))
                e_time = items.get('Expiry', None)
                # Expiry can be +/- infinity, which has no date representation
                if e_time in (float('inf'), float('-inf')):
                    e_time = None
                else:
                    e_time = CommonFunctions.ReadMacAbsoluteTime(e_time)
            elif isinstance(items, float): # In older hsts file (version 1):
                c_time = None
                e_time = items
                if e_time in (float('inf'), float('-inf')):
                    e_time = None
                else:
                    e_time = CommonFunctions.ReadMacAbsoluteTime(e_time)
            else:
                # This is HSTS_Content_Version and schema (file metadata, not a site)
                continue
            cookies.append(Cookie(site, c_time, e_time, '', '','', user_name, plist_path))
    else:
        log.error(f'Did not find com.apple.CFNetwork.defaultStorageSession in {plist_path}')
def GetFileMACTimesFromFileRecord(self, v):
    '''Return a dict of the four MACB timestamps from an HFS catalog file record.'''
    rec = v.data
    return {
        'c_time': CommonFunctions.ReadMacHFSTime(rec.attributeModDate),
        'm_time': CommonFunctions.ReadMacHFSTime(rec.contentModDate),
        'cr_time': CommonFunctions.ReadMacHFSTime(rec.createDate),
        'a_time': CommonFunctions.ReadMacHFSTime(rec.accessDate),
    }
def Plugin_Start_Standalone(input_files_list, output_params):
    '''Standalone entry point: dispatch each passed Safari artifact (plist or
    sqlite db) to its reader, then print whatever items were found.

    Bug fix: the final else branch called '...'.fromat(input_path) — a typo
    that raised AttributeError instead of logging the unrecognized file.
    '''
    log.info("Module Started as standalone")
    for input_path in input_files_list:
        log.debug("Input file passed was: " + input_path)
        safari_items = []  # reset per input file
        if input_path.endswith('.plist'):
            try:
                plist = readPlist(input_path)
                if input_path.lower().endswith('com.apple.safari.plist'):
                    ReadSafariPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('History.plist'):
                    ReadHistoryPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('Downloads.plist'):
                    ReadDownloadsPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('Bookmarks.plist'):
                    ReadBookmarksPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('TopSites.plist'):
                    ReadTopSitesPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('LastSession.plist'):
                    ReadLastSessionPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('Extensions.plist'):
                    ReadExtensionsPlist(plist, safari_items, input_path, '')
                elif input_path.endswith('RecentlyClosedTabs.plist'):
                    ReadRecentlyClosedTabsPlist(plist, safari_items, input_path, '')
                else:
                    log.error("Unknown plist type encountered: {}".format(os.path.basename(input_path)))
            except ValueError as ex:
                log.exception('Failed to open file: {}'.format(input_path))
        elif input_path.endswith('History.db'):
            log.info("Processing file " + input_path)
            try:
                conn = CommonFunctions.open_sqlite_db_readonly(input_path)
                log.debug("Opened database successfully")
                ReadHistoryDb(conn, safari_items, input_path, '')
            except (sqlite3.Error, OSError) as ex:
                log.exception("Failed to open database, is it a valid SQLITE DB?")
        elif input_path.endswith('CloudTabs.db'):
            log.info("Processing file " + input_path)
            try:
                conn = CommonFunctions.open_sqlite_db_readonly(input_path)
                log.debug("Opened database successfully")
                ReadCloudTabsDb(conn, safari_items, input_path, '')
            except (sqlite3.Error, OSError) as ex:
                log.exception("Failed to open database, is it a valid SQLITE DB?")
        elif input_path.endswith('BrowserState.db'):
            log.info("Processing file " + input_path)
            try:
                conn = CommonFunctions.open_sqlite_db_readonly(input_path)
                log.debug("Opened database successfully")
                ReadBrowserStateDb(conn, safari_items, input_path, '')
            except (sqlite3.Error, OSError) as ex:
                log.exception("Failed to open database, is it a valid SQLITE DB?")
        else:
            # was: '...'.fromat(input_path) — fixed typo
            log.error('Input file {} is not a recognized name of a Safari artifact!'.format(input_path))
        if len(safari_items) > 0:
            PrintAll(safari_items, output_params, input_path)
        else:
            log.info('No safari items found in {}'.format(input_path))
def ReadHistoryDb(chrome_artifacts, db, file_size, user, source):
    '''Reads Chrome's History db: URL visits, then downloaded files; results
    are appended to chrome_artifacts as ChromeItem objects.'''
    db.row_factory = sqlite3.Row
    cursor = db.cursor()
    # The referrer is resolved by looking up the URL of the 'from_visit' row
    query = """SELECT urls.url, urls.title, urls.visit_count, urls.hidden, v.visit_time, v.visit_duration, v.from_visit, (SELECT urls.url FROM urls LEFT JOIN visits ON urls.id = visits.url where visits.id=v.from_visit) as referrer FROM urls LEFT JOIN visits v ON urls.id = v.url ORDER BY v.visit_time"""
    cursor = db.execute(query)
    for row in cursor:
        visit_duration = row['visit_duration']
        visit_time = row['visit_time']
        # An end time only makes sense when both a start time and duration exist
        if visit_duration and (visit_time > 0):
            end_time = CommonFunctions.ReadChromeTime(visit_time + visit_duration)
        else:
            end_time = None
        item = ChromeItem(ChromeItemType.HISTORY, row['url'], row['title'],
                    CommonFunctions.ReadChromeTime(visit_time), end_time,
                    None, row['referrer'],
                    f"VisitCount={row['visit_count']}, Hidden={row['hidden']}",
                    user, source)
        chrome_artifacts.append(item)
    # downloaded files
    # chain_index = 0 picks the first (originating) URL of the redirect chain
    query = """SELECT current_path, target_path, start_time, end_time, received_bytes, total_bytes, c.url, referrer FROM downloads LEFT JOIN downloads_url_chains c ON c.id = downloads.id where c.chain_index = 0 ORDER BY start_time"""
    cursor = db.execute(query)
    for row in cursor:
        # ReadChromeTime's empty-string result is treated as "no timestamp"
        start_time = CommonFunctions.ReadChromeTime(row['start_time'])
        if start_time == '':
            start_time = None
        end_time = CommonFunctions.ReadChromeTime(row['end_time'])
        if end_time == '':
            end_time = None
        # Prefer the final target path; fall back to the in-progress path
        path = row['target_path']
        if not path:
            path = row['current_path']
        downloaded_file_name = os.path.basename(path)
        item = ChromeItem(ChromeItemType.DOWNLOAD, row['url'], downloaded_file_name,
                    start_time, end_time, row['referrer'], path,
                    f"Received Bytes = {row['received_bytes']}/{row['total_bytes']}",
                    user, source)
        chrome_artifacts.append(item)
def ReadHistoryPlist(plist, safari_items, source_path, user):
    '''Parse Safari History.plist, appending HISTORY and HISTORYDOMAINS
    SafariItem objects to safari_items.'''
    try:
        version = plist['WebHistoryFileVersion']
        if version != 1:
            log.warning('WebHistoryFileVersion is {}, this may not parse properly!'.format(version))
    except KeyError:
        log.error('WebHistoryFileVersion not found')
    try:
        history_dates = plist['WebHistoryDates']
        for item in history_dates:
            try:
                redirect_urls = ",".join(item.get('redirectURLs', ''))
                # NOTE: item.get('','') is intentional — the entry's URL appears
                # to be stored under the empty-string key (verify against a
                # sample History.plist).
                si = SafariItem(SafariItemType.HISTORY, item.get('',''), item.get('title', ''), \
                            CommonFunctions.ReadMacAbsoluteTime(item.get('lastVisitedDate', '')), \
                            '' if (redirect_urls == '') else ('REDIRECT_URLS:' + redirect_urls) , user, source_path) # Skipped visitCount
                safari_items.append(si)
            except ValueError as ex:
                log.error(str(ex))
    except KeyError:
        log.error('WebHistoryDates not found')
    try:
        history_domains = plist['WebHistoryDomains.v2']
        for item in history_domains:
            # Domain name is also stored under the empty-string key here
            si = SafariItem(SafariItemType.HISTORYDOMAINS, '', item.get('', ''), None,
                        'ITEMCOUNT:' + str(item.get('itemCount', 0)) , user, source_path)
            safari_items.append(si)
    except KeyError:
        log.error('WebHistoryDomains.v2 not found')
def Plugin_Start_Standalone(input_files_list, output_params):
    '''Standalone entry point: handle .LastGKReject plists and
    QuarantineEventsV2 databases passed on the command line.

    Cleanup: `db != None` -> `db is not None` (PEP 8 identity comparison);
    removed the unused local `filename`.
    '''
    log.info("Module Started as standalone")
    for input_path in input_files_list:
        log.debug("Input file passed was: " + input_path)
        if input_path.endswith('.LastGKReject'):
            success, plist, error = CommonFunctions.ReadPlist(input_path)
            if success:
                ReadLastGKRejectPlist(plist)
            else:
                log.error('Failed to read file: {}. {}'.format(input_path, error))
        elif input_path.endswith('QuarantineEventsV2'):
            quarantined = []
            db = OpenDb(input_path)
            if db is not None:
                ReadQuarantineDb(db, quarantined, input_path, '')
                db.close()
            else:
                log.error(f'Failed to open database {input_path}')
            if len(quarantined) > 0:
                PrintAll(quarantined, output_params)
            else:
                log.info('No quarantine events found in {}'.format(input_path))
        else:
            log.info(f'Not a QUARANTINE database file: {input_path}')
def read_plist_file(input_file):
    '''Read a plist file; return the parsed object, or None (after logging) on failure.'''
    success, plist, error = CommonFunctions.ReadPlist(input_file)
    if not success:
        log.error(error)
        return None
    return plist
def ProcessMRU(office_items, app_name, mru_list, user, source):
    '''Convert each MS Office MRU entry into an MSOfficeItem appended to office_items.

    Bug fix: struct.unpack raises struct.error (not a ValueError subclass)
    when the 'Access Date' blob is missing or shorter than 6 bytes — that
    case previously escaped the inner except and aborted the whole entry.
    '''
    for mru in mru_list:
        try:
            access_data = mru.get('Access Date', '')
            access_time = None
            try:
                # Bytes 2..5 hold a little-endian HFS timestamp
                v = struct.unpack('<I', access_data[2:6])[0]
                access_time = CommonFunctions.ReadMacHFSTime(v)
            except (IndexError, ValueError, struct.error):
                log.exception('')
            path = ''
            alias_data = mru.get('File Alias', None)
            if alias_data:
                try:
                    # The alias blob resolves to the target file's path
                    alias_properties = next(AliasParser.parse(source, 0, alias_data))
                    #log.debug(alias_properties)
                    path = alias_properties.get('path', '')
                except (IndexError, ValueError, KeyError, TypeError):
                    log.exception('')
            o_item = MSOfficeItem(app_name, access_time, 'MRU', path, '', user, source)
            office_items.append(o_item)
        except (ValueError, TypeError):
            log.exception('')
def Plugin_Start_Standalone(input_files_list, output_params):
    '''Standalone entry point: handle MobileMeAccounts.plist and
    Accounts[1-4].sqlite databases, then print any accounts found.

    Cleanup: `db != None` -> `db is not None` (PEP 8 identity comparison).
    '''
    log.info("Module Started as standalone")
    for input_path in input_files_list:
        log.debug("Input file passed was: " + input_path)
        accounts = []
        if os.path.basename(input_path).upper() == 'MOBILEMEACCOUNTS.PLIST':
            success, plist, error = CommonFunctions.ReadPlist(input_path)
            if success:
                ReadMobileMeAccountPlist(plist, accounts, input_path, '')
            else:
                log.error('Failed to read plist - {} Error was {}'.format(input_path, error))
        elif os.path.basename(input_path).upper() in ('ACCOUNTS1.SQLITE', 'ACCOUNTS2.SQLITE', 'ACCOUNTS3.SQLITE', 'ACCOUNTS4.SQLITE'):
            db = OpenDb(input_path)
            if db is not None:
                ReadAccountsDb(db, accounts, input_path, '')
                db.close()
        else:
            log.info("Unknown file type: {}".format(os.path.basename(input_path)))
        if len(accounts) > 0:
            PrintAll(accounts, output_params, input_path)
        else:
            log.info('No accounts found in {}'.format(input_path))
def ReadLastSessionPlist(plist, safari_items, source_path, user):
    '''Parse Safari LastSession.plist, adding one LASTSESSION item per tab.'''
    try:
        version = plist['SessionVersion']
        if version != '1.0':
            log.warning('SessionVersion is {}, this may not parse properly!'.format(version))
    except KeyError:
        log.error('SessionVersion not found')
    try:
        for window in plist['SessionWindows']:
            selected = window.get('SelectedTabIndex', None)
            for tab_index, tab in enumerate(window.get('TabStates', [])):
                info = 'SELECTED WINDOW' if tab_index == selected else ''
                closed = tab.get('DateClosed', '')
                log.debug(closed)
                if closed:
                    suffix = 'TAB_CLOSED_DATE=' + str(closed)
                    info = (info + ', ' + suffix) if info else suffix
                si = SafariItem(SafariItemType.LASTSESSION, tab.get('TabURL', ''),
                                tab.get('TabTitle', ''),
                                CommonFunctions.ReadMacAbsoluteTime(tab.get('LastVisitTime', '')),
                                info, user, source_path) # SessionState (encrypted) & TabIdentifier are skipped
                safari_items.append(si)
    except KeyError as ex:
        log.error('SessionWindows not found or unable to parse. Error was {}'.format(str(ex)))
def ParseCFURLEntry(db, cfurl_cache_artifacts, username, app_bundle_id, cfurl_cache_db_path):
    '''Read cfurl cache entries (request/response pairs plus received data)
    into cfurl_cache_artifacts as CfurlCacheItem objects.

    Cleanup: type(x) == T comparisons replaced with isinstance() (idiomatic,
    and tolerant of bytes/str subclasses sqlite adapters may return).
    '''
    db.row_factory = sqlite3.Row
    tables = CommonFunctions.GetTableNames(db)
    schema_version = 0
    if 'cfurl_cache_schema_version' in tables:
        schema_version = CheckSchemaVersion(db)
    else:
        log.debug('There is no cfurl_cache_schema_version table.')
    if 'cfurl_cache_response' in tables:
        if schema_version in (0, 202):
            query = """SELECT entry_ID, time_stamp, request_key, request_object, response_object, isDataOnFS, receiver_data FROM cfurl_cache_response JOIN cfurl_cache_blob_data USING (entry_ID) JOIN cfurl_cache_receiver_data USING (entry_ID)"""
            cursor = db.execute(query)
            for row in cursor:
                http_req_method, req_headers = ParseRequestObject(row['request_object'])
                http_status, resp_headers = ParseResponseObject(row['response_object'])
                receiver_data = row['receiver_data']
                if isinstance(receiver_data, bytes):
                    received_data = receiver_data
                elif isinstance(receiver_data, str):
                    received_data = receiver_data.encode()
                else:
                    log.error('Unknown type of "receiver_data": {}'.format(type(receiver_data)))
                    continue  # skip rows whose payload type we can't handle
                item = CfurlCacheItem(row['time_stamp'], row['request_key'], http_req_method,
                                      req_headers, http_status, resp_headers, row['isDataOnFS'],
                                      received_data, username, app_bundle_id, cfurl_cache_db_path)
                cfurl_cache_artifacts.append(item)
def ParseRegistrationDB(conn, office_reg_items, user, source):
    '''Walk the registry-like MicrosoftRegistrationDB.reg sqlite db and emit
    every value under HKCU\\Software as an MSOfficeRegItem.'''
    conn.row_factory = sqlite3.Row
    try:
        # Recursive CTE: starting at the 'Software' node (node_id 1), build the
        # full backslash-separated key path for every descendant key, then
        # LEFT JOIN each key to its values.
        query = str("SELECT t2.node_id as id, t2.write_time as keyLastWriteTime, path as key, HKEY_CURRENT_USER_values.name as valueName, HKEY_CURRENT_USER_values.value as value, HKEY_CURRENT_USER_values.type as valueType from ( "
                    " WITH RECURSIVE "
                    " under_software(path, name, node_id, write_time) AS ( "
                    " VALUES('Software','',1, NULL) "
                    " UNION ALL "
                    " SELECT under_software.path || '\\' || HKEY_CURRENT_USER.name, HKEY_CURRENT_USER.name, HKEY_CURRENT_USER.node_id, HKEY_CURRENT_USER.write_time "
                    " FROM HKEY_CURRENT_USER JOIN under_software ON HKEY_CURRENT_USER.parent_id=under_software.node_id "
                    " ORDER BY 1 "
                    " ) "
                    " SELECT name, path, write_time, node_id FROM under_software "
                    " ) as t2 LEFT JOIN HKEY_CURRENT_USER_values on HKEY_CURRENT_USER_values.node_id=t2.node_id ")
        cursor = conn.execute(query)
        data = cursor.fetchall()
        try:
            for row in data:
                # write_time is a Windows FILETIME packed as a 64-bit value
                item = MSOfficeRegItem(row['id'],
                                       CommonFunctions.ReadWindowsFileTime(GetUint64Value(row['keyLastWriteTime'])),
                                       GetStringRepresentation(row['key']),
                                       GetStringValueType(row['valueType']),
                                       GetStringRepresentation(row['valueName']),
                                       GetStringRepresentation(row['value'], row['valueType']),
                                       user, source)
                office_reg_items.append(item)
        except (sqlite3.Error, ValueError, IndexError):
            log.exception('')
    except sqlite3.Error as ex:
        log.exception('Error executing query : {}'.format(query))
def ParseInstallHistoryFile(input_file):
    '''Parse InstallHistory.plist; return the list of install events (empty on failure).'''
    history = []
    success, plist, error = CommonFunctions.ReadPlist(input_file)
    if not success:
        log.error("Could not open plist, error was : " + error)
        return history
    ReadInstallHistoryPlist(plist, history)
    return history
def Plugin_Start_Standalone(input_files_list, output_params):
    '''Standalone entry point: parse MS Office plists and
    MicrosoftRegistrationDB.reg, then print items found per input file.'''
    log.info("Module Started as standalone")
    for input_path in input_files_list:
        log.debug("Input file passed was: " + input_path)
        office_items = []
        office_reg_items = []
        if input_path.endswith('com.microsoft.office.plist'):
            success, plist, error = CommonFunctions.ReadPlist(input_path)
            if success:
                ProcessOfficePlist(plist, office_items, '', input_path)
            else:
                log.error('Failed to read file: {}. {}'.format(input_path, error))
        else:
            basename = path.basename(input_path)
            if basename.startswith('com.microsoft.') and basename.endswith('.plist'):
                success, plist, error = CommonFunctions.ReadPlist(input_path)
                if success:
                    if basename.endswith('securebookmarks.plist'):
                        # Strip 'com.microsoft.' prefix (14 chars) and
                        # '.securebookmarks.plist' suffix (22 chars)
                        app_name = basename[14:-22]
                        ProcessOfficeAppSecureBookmarksPlist(plist, office_items, app_name, '', input_path)
                    else:
                        # Strip 'com.microsoft.' prefix and '.plist' suffix
                        app_name = basename[14:-6]
                        ProcessOfficeAppPlist(plist, office_items, app_name, '', input_path)
                else:
                    log.error('Failed to read file: {}. {}'.format(input_path, error))
            elif input_path.endswith('MicrosoftRegistrationDB.reg'):
                conn = OpenDb(input_path)
                if conn:
                    ParseRegistrationDB(conn, office_reg_items, '', input_path)
                    conn.close()
        if len(office_items) > 0:
            PrintItems(office_items, output_params)
        else:
            log.info('No office items found in {}'.format(input_path))
        if len(office_reg_items) > 0:
            PrintRegItems(office_reg_items, output_params)
        else:
            log.info('No office registries found')
def ParseWifi(input_file):
    '''Parse an airport preferences plist; return the list of networks found
    (empty list on failure).

    Bug fix: the original referenced the undefined name `input_path` instead
    of the `input_file` parameter, raising NameError on every call.
    '''
    networks = []
    success, plist, error = CommonFunctions.ReadPlist(input_file)
    if success:
        ReadAirportPrefPlist(plist, networks)
    else:
        log.error("Could not open plist, error was : " + error)
    return networks
def OpenDb(inputPath):
    '''Open a sqlite database read-only; return the connection, or None on error.'''
    log.info("Processing file " + inputPath)
    try:
        conn = CommonFunctions.open_sqlite_db_readonly(inputPath)
    except sqlite3.Error:
        log.exception("Failed to open database, is it a valid DB?")
        return None
    log.debug("Opened database successfully")
    return conn
def ProcessFile(windows_plist_file_path, data_file_path, terminals):
    '''Read the windows plist and the companion data file, then hand both to Process().'''
    success, windows_plist, error = CommonFunctions.ReadPlist(windows_plist_file_path)
    if not success:
        log.error(f"Error reading plist - {windows_plist_file_path}. Error={error}")
        return
    with open(data_file_path, 'rb') as f:
        all_data = f.read() # expected to be a small file
    Process(windows_plist, all_data, terminals, '', data_file_path)
def parse_user_acct_plist(plist, user_accounts, plist_path):
    '''Parse plist and add items to app list'''
    for user, items in plist.items():
        uid = items.get('uid', '')
        for session_name, sessions in items.items():
            # Skip the uid field itself; only list-valued keys are sessions
            # (the key is the session name, e.g. a tty or 'console')
            if session_name == 'uid' or not isinstance(sessions, list):
                continue
            for session in sessions:
                user_accounts.append(UserAcct(
                    session_name,
                    CommonFunctions.ReadMacAbsoluteTime(session.get('inTime', None)),
                    CommonFunctions.ReadMacAbsoluteTime(session.get('outTime', None)),
                    uid, user, plist_path))
def parse_cookie_file(cookie_file, cookies, user_name, file_path):
    '''Parse .binarycookies or .cookies file'''
    data = cookie_file.read()
    if data[0:4] == b'cook':  # file magic
        # File header fields are big-endian; page-internal fields are little-endian
        num_pages = struct.unpack('>I', data[4:8])[0]
        if num_pages == 0:
            return
        # Page size table follows the header
        page_sizes = []
        pos = 8
        for x in range(num_pages):
            page_sizes.append(struct.unpack('>I', data[pos : pos + 4])[0])
            pos += 4
        page_start = pos
        for page_size in page_sizes: # read page
            pos = page_start
            pos += 4  # skip 4-byte page header
            num_cookies = struct.unpack('<I', data[pos : pos + 4])[0]
            pos += 4
            # Cookie offsets are relative to the start of the page
            offsets = []
            for y in range(num_cookies):
                offsets.append(struct.unpack('<I', data[pos : pos + 4])[0])
                pos += 4
            for offset in offsets:
                cookie_data = data[page_start + offset : page_start + page_size]
                # Fixed 56-byte cookie record: 8 uint32 (length, flags, string
                # offsets, unknowns), 1 uint64, then 2 doubles (expiry, create)
                length, unk1, flags, unk2, url_offset, \
                    name_offset, path_offset, value_offset, \
                    unk3, expiry_time, create_time = struct.unpack('<IIIIIIIIQdd', cookie_data[0:56])
                # String fields are NUL-terminated, located by their offsets
                url = read_cstring(cookie_data[url_offset:])
                name = read_cstring(cookie_data[name_offset:])
                path = read_cstring(cookie_data[path_offset:])
                value = read_cstring(cookie_data[value_offset:])
                # Drop the leading '.' of domain-wide cookies
                if url and url[0] == '.':
                    url = url[1:]
                expiry_time = CommonFunctions.ReadMacAbsoluteTime(expiry_time)
                create_time = CommonFunctions.ReadMacAbsoluteTime(create_time)
                cookies.append(Cookie(url, create_time, expiry_time, name, path, value, user_name, file_path))
            page_start = page_start + page_size
    else:
        log.error('Not the expected header for cookie file. Got {} instead of "cook"'.format(str(data[0:4])))
def __init__(self, file_label, parent_mod_date, file_mod_date, recent_used, file_type, file_data, guid, user, source_path):
    '''Build a recent-item record; HFS timestamps are converted to datetimes.'''
    # On High Sierra and above the HFS timestamps carry extra data in the
    # upper 32 bits (possibly finer, microsecond-level resolution); only the
    # low 32 bits hold the classic HFS time, so mask the rest off.
    if parent_mod_date and (parent_mod_date > 0xFFFFFFFF):
        parent_mod_date &= 0xFFFFFFFF
    if file_mod_date and (file_mod_date > 0xFFFFFFFF):
        file_mod_date &= 0xFFFFFFFF
    self.file_label = file_label
    self.parent_mod_date = CommonFunctions.ReadMacHFSTime(parent_mod_date)
    self.file_mod_date = CommonFunctions.ReadMacHFSTime(file_mod_date)
    self.recent_used = recent_used
    self.file_type = file_type
    self.file_path = file_data
    self.guid = guid
    self.user = user
    self.path = source_path
def ParseWifi(input_path):
    '''Parse a wifi plist; return the list of networks found (empty on failure).'''
    networks = []
    success, plist, error = CommonFunctions.ReadPlist(input_path)
    if not success:
        log.error("Could not open plist, error was : " + error)
        return networks
    # Newer macOS stores known networks in a differently-structured plist
    if os.path.basename(input_path) == 'com.apple.wifi.known-networks.plist':
        ReadKnownNetworksPlist(plist, networks, input_path)
    else:
        ReadAirportPrefPlist(plist, networks, input_path)
    return networks
def ReadTopSitesDb(chrome_artifacts, db, file_size, user, source):
    '''Reads Chrome's Top Sites db (handles two schema versions) into
    chrome_artifacts as TOPSITE ChromeItem objects.'''
    try:
        db.row_factory = sqlite3.Row
        tables = CommonFunctions.GetTableNames(db)
        if 'topsites' in tables: # meta.version == 4
            # NOTE(review): the query selects from 'top_sites' but the presence
            # check looks for 'topsites' — confirm what GetTableNames returns;
            # if it yields literal table names this branch may never match.
            cursor = db.cursor()
            query = "SELECT url, url_rank, title from top_sites ORDER BY url_rank ASC"
            cursor = db.execute(query)
            for row in cursor:
                item = ChromeItem(ChromeItemType.TOPSITE, row['url'], row['title'],
                            None, None, None, None,
                            f"URL_RANK={row['url_rank']}", user, source)
                chrome_artifacts.append(item)
        elif 'thumbnails' in tables: # meta.version == 3
            cursor = db.cursor()
            query = "SELECT url, url_rank, title, last_updated from thumbnails ORDER BY url_rank ASC"
            cursor = db.execute(query)
            for row in cursor:
                item = ChromeItem(ChromeItemType.TOPSITE, row['url'], row['title'],
                            CommonFunctions.ReadChromeTime(row['last_updated']),
                            None, None, None,
                            f"URL_RANK={row['url_rank']}", user, source)
                chrome_artifacts.append(item)
    except sqlite3.Error:
        log.exception('DB read error from ReadTopSitesDb()')
def Plugin_Start_Standalone(input_files_list, output_params):
    '''Standalone entry point: parse each Active Directory plist and write results.

    Bug fix: WriteList was called with mac_info.output_params, but mac_info is
    not defined in standalone mode (NameError); the function's own
    output_params parameter is used instead.
    '''
    log.info("Module Started as standalone")
    for input_path in input_files_list:
        log.debug("Trying to read plist : " + input_path)
        success, plist, error = CommonFunctions.ReadPlist(input_path)
        if success:
            ProcessActiveDirectoryPlist(input_path, plist)
            WriteList('domain details', 'Domain_ActiveDirectory', ad_details, ad_info, output_params, input_path)
        else:
            log.error("Failed to read plist " + input_path + " Error was: " + error)
def Plugin_Start_Standalone(input_files_list, output_params):
    '''Standalone entry point: parse each bluetooth plist and print devices found.'''
    for input_file in input_files_list:
        cache_list = []
        success, plist, error = CommonFunctions.ReadPlist(input_file)
        if success:
            cache_list = ReadBluetoothPlist(plist)
        else:
            log.error(error)
        if cache_list:
            PrintAll(cache_list, output_params, input_file)
        else:
            log.debug("No bluetooth devices found")
def ReadAttPathFromPlist(plist_blob):
    '''For NotesV2, read plist and get path'''
    success, plist, error = CommonFunctions.ReadPlist(BytesIO(plist_blob))
    if not success:
        log.error("Invalid plist in table. " + error)
        return ''
    try:
        # Path string sits at index 2 of the NSKeyedArchiver $objects array
        return plist['$objects'][2]
    except (KeyError, IndexError):
        log.exception('Could not fetch attachment path from plist')
    return ''
def ReadNotes(db, notes, source, user, is_ios):
    '''Read Notestore.sqlite'''
    # High Sierra (and later) databases use a different schema — delegate
    if IsHighSierraOrAboveDb(db):
        ReadNotesHighSierraAndAbove(db, notes, source, user, is_ios)
        return
    # Encryption columns only exist in some schema versions
    if CommonFunctions.ColumnExists(db, 'ZICCLOUDSYNCINGOBJECT', 'ZISPASSWORDPROTECTED'):
        enc_possible = True
    else:
        enc_possible = False
    # Two variants of the notes<->folders join table are seen in the wild:
    #   query1 assumes Z_12NOTES (columns Z_12FOLDERS / Z_9NOTES)
    #   query2 assumes Z_11NOTES (columns Z_11FOLDERS / Z_8NOTES)
    # query1 is tried first; query2 is the fallback.
    query1 = " SELECT n.Z_12FOLDERS as folder_id , n.Z_9NOTES as note_id, d.ZDATA as data, " + ("c1.ZISPASSWORDPROTECTED as encrypted, c1.ZPASSWORDHINT, " if enc_possible else "") + \
            " c2.ZTITLE2 as folder, c2.ZDATEFORLASTTITLEMODIFICATION as folder_title_modified, " \
            " c1.ZCREATIONDATE as created, c1.ZMODIFICATIONDATE1 as modified, c1.ZSNIPPET as snippet, c1.ZTITLE1 as title, c1.ZACCOUNT2 as acc_id, " \
            " c5.ZACCOUNTTYPE as acc_type, c5.ZIDENTIFIER as acc_identifier, c5.ZNAME as acc_name, " \
            " c3.ZMEDIA as media_id, c3.ZFILESIZE as att_filesize, c3.ZMODIFICATIONDATE as att_modified, c3.ZPREVIEWUPDATEDATE as att_previewed, c3.ZTITLE as att_title, c3.ZTYPEUTI, c3.ZIDENTIFIER as att_uuid, " \
            " c4.ZFILENAME, c4.ZIDENTIFIER as media_uuid " \
            " FROM Z_12NOTES as n " \
            " LEFT JOIN ZICNOTEDATA as d ON d.ZNOTE = n.Z_9NOTES " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c1 ON c1.Z_PK = n.Z_9NOTES " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c2 ON c2.Z_PK = n.Z_12FOLDERS " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c3 ON c3.ZNOTE = n.Z_9NOTES " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c4 ON c3.ZMEDIA = c4.Z_PK " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c5 ON c5.Z_PK = c1.ZACCOUNT2 " \
            " ORDER BY note_id "
    query2 = " SELECT n.Z_11FOLDERS as folder_id , n.Z_8NOTES as note_id, d.ZDATA as data, " + ("c1.ZISPASSWORDPROTECTED as encrypted, c1.ZPASSWORDHINT, " if enc_possible else "") + \
            " c2.ZTITLE2 as folder, c2.ZDATEFORLASTTITLEMODIFICATION as folder_title_modified, " \
            " c1.ZCREATIONDATE as created, c1.ZMODIFICATIONDATE1 as modified, c1.ZSNIPPET as snippet, c1.ZTITLE1 as title, c1.ZACCOUNT2 as acc_id, " \
            " c5.ZACCOUNTTYPE as acc_type, c5.ZIDENTIFIER as acc_identifier, c5.ZNAME as acc_name, " \
            " c3.ZMEDIA as media_id, c3.ZFILESIZE as att_filesize, c3.ZMODIFICATIONDATE as att_modified, c3.ZPREVIEWUPDATEDATE as att_previewed, c3.ZTITLE as att_title, c3.ZTYPEUTI, c3.ZIDENTIFIER as att_uuid, " \
            " c4.ZFILENAME, c4.ZIDENTIFIER as media_uuid " \
            " FROM Z_11NOTES as n " \
            " LEFT JOIN ZICNOTEDATA as d ON d.ZNOTE = n.Z_8NOTES " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c1 ON c1.Z_PK = n.Z_8NOTES " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c2 ON c2.Z_PK = n.Z_11FOLDERS " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c3 ON c3.ZNOTE = n.Z_8NOTES " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c4 ON c3.ZMEDIA = c4.Z_PK " \
            " LEFT JOIN ZICCLOUDSYNCINGOBJECT as c5 ON c5.Z_PK = c1.ZACCOUNT2 " \
            " ORDER BY note_id "
    cursor, error1 = ExecuteQuery(db, query1)
    if cursor:
        ReadQueryResults(cursor, notes, enc_possible, user, source)
    else: # Try query2
        cursor, error2 = ExecuteQuery(db, query2)
        if cursor:
            ReadQueryResults(cursor, notes, enc_possible, user, source)
        else:
            log.error('Query execution failed.\n Query 1 error: {}\n Query 2 error: {}'.format(error1, error2))
def PrintAll(revisions, output_params):
    '''Write all document-revision items out via WriteList.'''
    revisions_info = [('File_Inode', DataType.INTEGER), ('Storage_ID', DataType.INTEGER),
                      ('File_Path', DataType.TEXT), ('Exists_On_Disk', DataType.TEXT),
                      ('File_Last_Seen_UTC', DataType.DATE), ('Generation_Added_UTC', DataType.DATE),
                      ('Generation_Path', DataType.TEXT), ('Source', DataType.TEXT)]
    log.info(str(len(revisions)) + " revision item(s) found")
    revisions_list = [
        [rev.inode, rev.storage_id, rev.path, rev.exists,
         CommonFunctions.ReadUnixTime(rev.last_seen),
         CommonFunctions.ReadUnixTime(rev.generation_added),
         rev.generation_path, rev.source_file]
        for rev in revisions
    ]
    WriteList("revisions information", "DocumentRevisions", revisions_list, revisions_info, output_params, '')
def ReadQueryResults(cursor, notes, enc_possible, user, source):
    '''Convert rows produced by the ReadNotes queries into Note objects
    appended to notes. enc_possible tells whether the 'encrypted' and
    'ZPASSWORDHINT' columns exist in the result set.'''
    for row in cursor:
        try:
            att_path = ''
            if row['media_id'] != None:
                att_path = row['ZFILENAME']
            if enc_possible and row['encrypted'] == 1:
                # Encrypted note: body cannot be decoded, keep only the hint
                text_content = ''
                pw_hint = row['ZPASSWORDHINT']
            else:
                pw_hint = ''
                # Note body is stored gzip-compressed; decompress then parse
                data = GetUncompressedData(row['data'])
                text_content = ProcessNoteBodyBlob(data)
            note = Note(row['note_id'], row['folder'], row['title'], row['snippet'], text_content,
                        row['att_uuid'], att_path, row['acc_name'], row['acc_identifier'], '',
                        CommonFunctions.ReadMacAbsoluteTime(row['created']),
                        CommonFunctions.ReadMacAbsoluteTime(row['modified']),
                        'NoteStore', row['encrypted'] if enc_possible else 0,
                        pw_hint, user, source)
            notes.append(note)
        except sqlite3.Error:
            # Per-row failure: log and continue with remaining rows
            log.exception('Error fetching row data')
def ReadBrowserStateDb(conn, safari_items, source_path, user):
    '''Read Safari's BrowserState.db: one TAB item per open tab, plus
    TABHISTORY items for each tab's session-history entries (parsed out of
    the session_data blob). Closes conn when done.'''
    try:
        conn.row_factory = sqlite3.Row
        cursor = conn.execute("""SELECT t.id, url, title, session_data, t.uuid FROM tabs t LEFT JOIN tab_sessions s on s.tab_uuid=t.uuid""")
        try:
            for row in cursor:
                try:
                    si = SafariItem(SafariItemType.TAB, row['url'], row['title'], '',
                                    f'Tab UUID={row["uuid"]}', user, source_path)
                    safari_items.append(si)
                    plist_data = row['session_data']
                    if plist_data and len(plist_data) > 10:
                        # session_data: 4-byte header followed by a plist
                        f = io.BytesIO(plist_data[4:])
                        success, plist, error = CommonFunctions.ReadPlist(f)
                        if success:
                            history = plist.get('SessionHistory', None)
                            if history:
                                #current_session = history.get('SessionHistoryCurrentIndex', 0)
                                entries = history.get('SessionHistoryEntries', [])
                                for index, entry in enumerate(entries):
                                    url = entry.get('SessionHistoryEntryURL', '')
                                    title = entry.get('SessionHistoryEntryTitle', '')
                                    if url == row['url']:
                                        continue # same as current tab, skip it
                                    si = SafariItem(SafariItemType.TABHISTORY, url, title, '',
                                                    f'Tab UUID={row["uuid"]} index={index}',
                                                    user, source_path)
                                    safari_items.append(si)
                        else:
                            log.error(f'Failed to read plist for tab {row["uuid"]}, {row["id"]}. {error}')
                except sqlite3.Error as ex:
                    log.exception("Error while fetching row data")
        except sqlite3.Error as ex:
            log.exception("Db cursor error while reading file " + source_path)
        conn.close()
    except sqlite3.Error as ex:
        log.exception("Sqlite error")