def get_medicalID(files_found, report_folder, seeker):
    """Parse the Health 'Medical ID' plist and report each key/value pair.

    Only the first file in files_found is read. Dict values are rendered as
    'value unit' using the HealthKit unit string when present; list values
    are stringified; everything else is reported as-is.
    """
    source = str(files_found[0])
    rows = []
    with open(source, 'rb') as fp:
        plist = nd.deserialize_plist(fp)
        for raw_key, raw_value in plist.items():
            label = get_name(raw_key)
            if isinstance(raw_value, dict):
                unit = raw_value.get('UnitKey', {}).get('HKUnitStringKey', '')
                rendered = str(raw_value.get('ValueKey', ''))
                if unit:
                    rendered = rendered + ' ' + unit
                rows.append((label, rendered))
            elif isinstance(raw_value, list):
                # not seen!
                rows.append((label, str(raw_value)))
            else:
                rows.append((label, raw_value))

    if rows:
        description = 'User entered Medical information about self'
        report = ArtifactHtmlReport('Medical ID')
        report.start_artifact_report(report_folder, 'Health Info', description)
        report.add_script()
        data_headers = ('Key', 'Value')
        report.write_artifact_data_table(data_headers, rows, source)
        report.end_artifact_report()
        tsv(report_folder, data_headers, rows, 'Medical ID')
        timeline(report_folder, 'Medical ID', rows, data_headers)
    else:
        logfunc('No data on Medical ID')
def get_filesAppsm(files_found, report_folder, seeker):
    """Parse the Files App smartfolders.db: export each FILENAMES bplist blob,
    deserialize it, and report one row per filename.

    Fixes vs previous version:
    - per-row plist fields are reset each iteration (previously a row missing
      e.g. '_flags' silently inherited the value from the prior row, or raised
      NameError on the first row)
    - returns early if no path ends with 'smartfolders.db' (previously it
      connected to whatever file happened to be last in files_found)
    - file handles are context-managed
    """
    file_found = None
    for candidate in files_found:
        candidate = str(candidate)
        if candidate.endswith('smartfolders.db'):
            file_found = candidate
            break
    if file_found is None:
        logfunc('No Files App - Filenames data available')
        return

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT * FROM FILENAMES
    ''')
    all_rows = cursor.fetchall()
    data_list = []
    if len(all_rows) > 0:
        for row in all_rows:
            # Export the raw blob alongside the report so it can be examined manually.
            bplist_path = os.path.join(report_folder, row[2] + '.bplist')
            with open(bplist_path, "wb") as output_file:
                output_file.write(row[1])
            with open(bplist_path, "rb") as f:
                deserialized_plist = nd.deserialize_plist(f)

            # Defaults per row: a missing key must not leak a previous row's value.
            creationdate = ''
            contentmodificationdate = ''
            flags = ''
            userinfo = ''
            childitemcount = ''
            for x, y in deserialized_plist.items():
                if x == '_creationDate':
                    creationdate = y
                elif x == '_contentModificationDate':
                    contentmodificationdate = y
                elif x == '_flags':
                    flags = y
                elif x == '_userInfo':
                    userinfo = y
                elif x == '_childItemCount':
                    childitemcount = y

            # NOTE(review): fromtimestamp() uses the local timezone — confirm
            # whether UTC (utcfromtimestamp) was intended.
            lasthitdate = datetime.datetime.fromtimestamp(row[3])
            data_list.append((lasthitdate, row[0], row[2], row[4], creationdate,
                              contentmodificationdate, userinfo, childitemcount, flags))

        description = 'Files App - Files stored in the "On my iPad" area.'
        report = ArtifactHtmlReport('Files App - Filenames')
        report.start_artifact_report(report_folder, 'Files App - Filenames', description)
        report.add_script()
        data_headers = ('Last Hit Date', 'Folder ID', 'Filename',
                        'Frequency at Last Hit Date', 'Creation Date',
                        'Modification Date', 'User Info', 'Child Item Count', 'Flags')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        tsvname = 'Files App - Filenames'
        tsv(report_folder, data_headers, data_list, tsvname)
        tlactivity = 'Files App - Filenames'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Files App - Filenames data available')
    db.close()
def ReadSFL2Plist(file_handle, recent_items, source, user=''):
    """Parse an SFL2 (SharedFileList v2) plist and append RecentItem objects.

    The item type is inferred from the source filename; each item's Bookmark
    blob (raw bytes or wrapped in an NSData dict) is parsed when present.
    All known deserialization errors are logged, never raised.
    """
    basename = os.path.basename(source).lower()
    # First matching marker wins — same precedence as the old if/elif chain.
    marker_to_type = (
        ('recentservers', RecentType.SERVER),
        ('recenthosts', RecentType.HOST),
        ('recentdocuments', RecentType.DOCUMENT),
        ('recentapplications', RecentType.APPLICATION),
        ('favoritevolumes', RecentType.VOLUME),
    )
    try:
        plist = nd.deserialize_plist(file_handle)
        for entry in plist['items']:
            name = entry.get('Name', '')
            uuid = entry.get('uuid', '')
            recent_type = RecentType.UNKNOWN
            for marker, candidate_type in marker_to_type:
                if marker in basename:
                    recent_type = candidate_type
                    break
            ri = RecentItem(name, '', 'uuid={}'.format(uuid), source, recent_type, user)
            recent_items.append(ri)
            data = entry.get('Bookmark', None)
            # Bookmark may be raw bytes or an NSData-style dict {'NS.data': bytes}.
            if isinstance(data, dict):
                data = data.get('NS.data', None)
            if data:
                ri.ReadBookmark(data)
    except (KeyError, nd.DeserializeError, nd.biplist.NotBinaryPlistException,
            nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
            nd.ccl_bplist.BplistError, ValueError, TypeError, OSError,
            OverflowError):
        log.exception('Error reading SFL2 plist')
def Plugin_Start(mac_info):
    '''Main Entry point function for plugin'''
    # appList.dat lives inside each user's home directory.
    applist_path = '{}/Library/Application Support/com.apple.spotlight/appList.dat'
    apps = []
    seen_home_dirs = []
    for user in mac_info.users:
        user_name = user.user_name
        if user.home_dir == '/private/var/empty':
            continue  # Optimization, nothing should be here!
        if user.home_dir == '/private/var/root':
            # Several system users share root's folder; report them all as 'root'
            # since there is no way to tell them apart.
            user_name = 'root'
        if user.home_dir in seen_home_dirs:
            continue  # Avoid processing the same folder twice (eg: root & daemon)
        seen_home_dirs.append(user.home_dir)
        source_path = applist_path.format(user.home_dir)
        if not mac_info.IsValidFilePath(source_path):
            continue
        mac_info.ExportFile(source_path, __Plugin_Name, user_name + "_", False)
        f = mac_info.Open(source_path)
        if f is None:
            log.error('Could not open file {}'.format(source_path))
            continue
        deserialized_plist = nd.deserialize_plist(f)
        if deserialized_plist:
            parse_appList_plist(deserialized_plist, apps, user_name, source_path)
    if apps:
        PrintAll(apps, mac_info.output_params, '')
    else:
        log.info('No apps found')
def get_decoded_plist_data(data):
    """Extract and deserialize the NSKeyedArchiver plist embedded in `data`.

    Observed layout (big-endian):
        bytes 0-3   : (unknown header)
        bytes 4-7   : name length N
        bytes 8..   : name (N bytes), then b'rchv' magic,
                      4-byte plist length L, then L bytes of plist.

    Returns (name, deserialized_plist) — `name` is the raw bytes slice,
    and the plist is None on any failure.

    Fixes vs previous version: bounds are validated before slicing/unpacking
    (truncated data previously raised struct.error), and the BytesIO object
    is context-managed instead of manually closed on both paths.
    """
    data_size = len(data)
    name = ''
    if data_size <= 8:
        log.warning('Plist seems empty!')
        return (name, None)
    name_len = struct.unpack('>I', data[4:8])[0]
    # Need 8 bytes of header + name + 4 bytes magic + 4 bytes plist length.
    if 16 + name_len > data_size:
        log.warning('Data truncated, cannot read embedded plist header')
        return (name, None)
    name = data[8:8 + name_len]
    log.debug('NSName = {}'.format(name))
    rchv = data[8 + name_len:12 + name_len]  # "rchv"
    if rchv != b"rchv":
        log.warning('magic was not "rchv", it was {}'.format(str(rchv)))
    nsa_plist_len = struct.unpack('>I', data[12 + name_len:16 + name_len])[0]
    nsa_plist = data[16 + name_len:16 + name_len + nsa_plist_len]
    with io.BytesIO(nsa_plist) as f:
        try:
            deserialized_plist = nd.deserialize_plist(f)
        except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
                nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
                nd.ccl_bplist.BplistError, ValueError, TypeError, OSError,
                OverflowError):
            log.exception("")
            return (name, None)
    return (name, deserialized_plist)
def get_textinputTyping(files_found, report_folder, seeker):
    """Parse keyboard 'aligned entries' plists: pending typed entries plus the
    final contextBeforeInput of the last aligned entry, one report per file.

    Fixes vs previous version:
    - a file that fails to deserialize is now skipped (previously
      deserialized_plist was set to None and then immediately subscripted,
      raising TypeError)
    - finalvalue defaults to '' (previously NameError if 'documentState'
      was absent)
    - the exception is logged via logfunc, consistent with the rest of
      this function, instead of print()
    """
    count = 0
    for file_found in files_found:
        logfunc(str(file_found))
        count = count + 1
        data_list = []
        with open(file_found, 'rb') as f:
            try:
                deserialized_plist = nd.deserialize_plist(f)
            except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
                    nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
                    nd.ccl_bplist.BplistError, ValueError, TypeError, OSError,
                    OverflowError) as ex:
                # These are all possible errors from libraries imported
                logfunc('Had exception: ' + str(ex))
                continue  # cannot parse this file — move on to the next one

        # Only the last aligned entry is examined, matching prior behavior.
        aligned_entries = deserialized_plist['alignedEntries']
        last_entry = aligned_entries[len(aligned_entries) - 1]
        finalvalue = ''  # contextBeforeInput of the final document state
        for x, y in last_entry['originalWord'].items():
            if x == 'documentState':
                finalvalue = y['contextBeforeInput']
            elif x == 'keyboardState':
                for a, b in y.items():
                    if a == 'inputContextHistory':
                        for c, d in b.items():
                            if c == 'pendingEntries':
                                # d is a list of pending typed entries
                                for e in d:
                                    data_list.append((e['timestamp'],
                                                      e['senderIdentifier'],
                                                      e['text'], ''))
        data_list.append(('', '', finalvalue, 'True'))

        entries = len(data_list)
        if entries > 0:
            report = ArtifactHtmlReport('Text Input Typing')
            report.start_artifact_report(report_folder, f'Messages {count}')
            report.add_script()
            data_headers = ('Timestamp', 'Sender Identifier', 'Text',
                            'contextBeforeInput')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            tsvname = f'Messages {count}'
            tsv(report_folder, data_headers, data_list, tsvname)
            tlactivity = f'Messages {count}'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f"No Messages {count} available")
def get_bundle_id_and_names_from_plist(library_plist_file_path):
    '''Parses Library.plist and returns a dictionary where Key=Bundle_ID, Value=Bundle_Name'''
    bundle_info = {}
    # with-statement: the file is now closed even if deserialization raises
    # (previously the handle leaked on exception).
    with open(library_plist_file_path, 'rb') as f:
        plist = nd.deserialize_plist(f)
    for k, v in plist.items():
        # Inverts the plist mapping — assumes the plist stores name -> bundle id,
        # so the result is bundle id -> name as the docstring says. TODO confirm.
        bundle_info[v] = k
    return bundle_info
def read_appList_plist_file(input_file, apps):
    """Deserialize an appList.dat plist from disk and feed it to
    parse_appList_plist, appending results to `apps`.

    All known deserialization/IO errors are logged and swallowed.
    """
    try:
        with open(input_file, 'rb') as fp:
            parsed = nd.deserialize_plist(fp)
        parse_appList_plist(parsed, apps, '', input_file)
    except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
            nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
            nd.ccl_bplist.BplistError, TypeError, OverflowError, ValueError,
            KeyError, IndexError, OSError):
        log.exception("Could not open/process plist")
def get_cloudkitServerSharedData(file_found, report_folder, seeker):
    """Extract CloudKit share participants from ZSERVERSHAREDATA blobs in the
    Notes store, exporting each raw blob and reporting one row per unique
    participant record id.

    Fixes vs previous version: participant fields are read with .get() chains —
    LookupInfo typically contains *either* EmailAddress *or* PhoneNumber, and
    name components are frequently absent, so the old direct indexing raised
    KeyError and aborted the whole artifact on the first incomplete participant.
    """
    user_dictionary = {}

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT Z_PK, ZSERVERSHAREDATA
    FROM ZICCLOUDSYNCINGOBJECT
    WHERE ZSERVERSHAREDATA NOT NULL
    ''')
    all_rows = cursor.fetchall()
    for row in all_rows:
        # Export the raw blob for manual review.
        filename = os.path.join(report_folder, 'zserversharedata_' + str(row[0]) + '.bplist')
        with open(filename, "wb") as output_file:
            output_file.write(row[1])

        deserialized_plist = nd.deserialize_plist(io.BytesIO(row[1]))
        for item in deserialized_plist:
            if 'Participants' not in item:
                continue
            for participant in item['Participants']:
                identity = participant.get('UserIdentity') or {}
                lookup = identity.get('LookupInfo') or {}
                names = (identity.get('NameComponents') or {}).get(
                    'NS.nameComponentsPrivate') or {}
                record_id = (identity.get('UserRecordID') or {}).get('RecordName')
                user_dictionary[record_id] = [
                    record_id,
                    lookup.get('EmailAddress'),
                    lookup.get('PhoneNumber'),
                    names.get('NS.namePrefix'),
                    names.get('NS.givenName'),
                    names.get('NS.middleName'),
                    names.get('NS.familyName'),
                    names.get('NS.nameSuffix'),
                    names.get('NS.nickname'),
                ]
    db.close()

    # Build the array after dealing with all the files
    user_list = list(user_dictionary.values())
    if len(user_list) > 0:
        description = 'CloudKit Participants - Cloudkit accounts participating in CloudKit shares.'
        report = ArtifactHtmlReport('Participants')
        report.start_artifact_report(report_folder, 'Participants', description)
        report.add_script()
        user_headers = ('Record ID', 'Email Address', 'Phone Number', 'Name Prefix',
                        'First Name', 'Middle Name', 'Last Name', 'Name Suffix',
                        'Nickname')
        report.write_artifact_data_table(user_headers, user_list, '', write_location=False)
        report.end_artifact_report()
        tsvname = 'Cloudkit Participants'
        tsv(report_folder, user_headers, user_list, tsvname)
    else:
        logfunc('No Cloudkit - Cloudkit Participants data available')
def ReadCloudTabsDb(conn, safari_items, source_path, user):
    """Read Safari's CloudTabs.db: one SafariItem per cloud tab, joined with
    its owning device name; closes `conn` when done.

    The nested try/except layers are deliberate: a failure on one row is
    logged and skipped without losing the remaining rows, a cursor failure is
    logged without losing already-collected items, and any other sqlite error
    is caught at the top level.
    """
    try:
        conn.row_factory = sqlite3.Row  # enables row['column'] access below
        cursor = conn.execute(
            """SELECT device_name, tab_uuid, t.system_fields, title, url, is_showing_reader, is_pinned
            FROM cloud_tabs t LEFT JOIN cloud_tab_devices d on d.device_uuid=t.device_uuid ORDER BY device_name""")
        try:
            for row in cursor:
                try:
                    pinned = row['is_pinned']
                    system_fields = row['system_fields']
                    created = ''
                    modified = ''
                    if system_fields:
                        # system_fields is a serialized plist blob holding the
                        # CloudKit record timestamps.
                        serialized_plist_file_obj = io.BytesIO(system_fields)
                        try:
                            deserialized_plist = nd.deserialize_plist(
                                serialized_plist_file_obj)
                            created = GetItemFromCloudDbPlist(
                                deserialized_plist, 'RecordCtime')
                            modified = GetItemFromCloudDbPlist(
                                deserialized_plist, 'RecordMtime')
                        except (nd.DeserializeError,
                                nd.biplist.NotBinaryPlistException,
                                nd.biplist.InvalidPlistException,
                                plistlib.InvalidFileException,
                                nd.ccl_bplist.BplistError, ValueError,
                                TypeError, OSError, OverflowError) as ex:
                            # Timestamps stay '' — the tab row is still reported.
                            log.exception('plist deserialization error')
                    si = SafariItem(
                        SafariItemType.CLOUDTAB, row['url'], row['title'],
                        created,
                        f'Modified={modified}' + (' pinned=1' if pinned else ''),
                        user, source_path)
                    safari_items.append(si)
                except sqlite3.Error as ex:
                    log.exception("Error while fetching row data")
        except sqlite3.Error as ex:
            log.exception("Db cursor error while reading file " + source_path)
        conn.close()
    except sqlite3.Error as ex:
        log.exception("Sqlite error")
def deserialize_plist(plist_path):
    """
    Use nska_deserialize to deserialize our plists
    Goes right off of: https://pypi.org/project/nska-deserialize/

    Returns the deserialized object, or None if the plist could not be read.
    """
    result = None
    with open(plist_path, 'rb') as fp:
        try:
            result = nd.deserialize_plist(fp)
        except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
                nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
                nd.ccl_bplist.BplistError, ValueError, TypeError, OSError,
                OverflowError):
            # Every known failure mode of the imported libraries maps to None.
            result = None
    return result
# iOS 13.x query — asset table is ZGENERICASSET.
_PHOTOS_METADATA_QUERY_IOS13 = """
SELECT
  DateTime(ZGENERICASSET.ZDATECREATED + 978307200, 'UNIXEPOCH') AS 'DateCreated',
  ZGENERICASSET.Z_PK AS 'GenericAsset_zpk',
  ZGENERICASSET.ZADDITIONALATTRIBUTES AS 'AddAttributes_Key',
  ZDETECTEDFACE.ZASSET AS 'DetectedFaceAsset',
  CASE ZGENERICASSET.ZKIND WHEN 0 THEN 'Photo' WHEN 1 THEN 'Video' END AS 'Kind',
  ZADDITIONALASSETATTRIBUTES.ZEXIFTIMESTAMPSTRING AS 'EXIFtimestamp',
  DateTime(ZADDITIONALASSETATTRIBUTES.ZSCENEANALYSISTIMESTAMP + 978307200, 'UNIXEPOCH') AS 'SceneAnalysisTimeStamp',
  DateTime(ZGENERICASSET.ZANALYSISSTATEMODIFICATIONDATE + 978307200, 'UNIXEPOCH') AS 'AnalysisStateModificationDate',
  DateTime(ZGENERICASSET.ZADDEDDATE + 978307200, 'UNIXEPOCH') AS 'AddDate',
  ZGENERICASSET.ZFILENAME AS 'FileName',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALFILENAME AS 'OriginalFilename',
  ZGENERICALBUM.ZTITLE AS 'AlbumTitle',
  ZADDITIONALASSETATTRIBUTES.ZCREATORBUNDLEID AS 'CreatorBundleID',
  ZADDITIONALASSETATTRIBUTES.ZEDITORBUNDLEID AS 'EditorBundleID',
  ZGENERICASSET.ZDIRECTORY AS 'Directory',
  ZGENERICASSET.ZUNIFORMTYPEIDENTIFIER AS 'UniformID',
  CASE ZGENERICASSET.ZSAVEDASSETTYPE WHEN 0 THEN 'Saved from other source' WHEN 2 THEN 'Photo Streams Data' WHEN 3 THEN 'Made/saved with this device' WHEN 4 THEN 'Default row' WHEN 7 THEN 'Deleted' ELSE ZGENERICASSET.ZSAVEDASSETTYPE END AS 'SavedAssetType',
  CASE WHEN ZGENERICASSET.ZFACEAREAPOINTS > 0 THEN 'Yes' ELSE 'NA' END AS 'FaceDetectedinPhoto',
  ZPERSON.ZDISPLAYNAME AS 'DisplayName',
  ZPERSON.ZFULLNAME AS 'FullName',
  ZPERSON.ZFACECOUNT AS 'FaceCount',
  ZDETECTEDFACE.ZPERSON AS 'Person',
  ZPERSON.ZCONTACTMATCHINGDICTIONARY AS 'ContactBlob',
  ZPERSON.ZPERSONUUID as 'PersonUUID',
  ZDETECTEDFACE.ZQUALITYMEASURE AS 'DetectedFaceQuality',
  CASE ZDETECTEDFACE.ZAGETYPE WHEN 1 THEN 'Baby/Toddler' WHEN 2 THEN 'Baby/Toddler' WHEN 3 THEN 'Child/YoungAdult' WHEN 4 THEN 'YoungAdult/Adult' WHEN 5 THEN 'Adult' ELSE ZDETECTEDFACE.ZAGETYPE END AS 'AgeTypeEstimate',
  CASE ZDETECTEDFACE.ZGENDERTYPE WHEN 1 THEN 'Male' WHEN 2 THEN 'Female' ELSE ZDETECTEDFACE.ZGENDERTYPE END AS 'Gender',
  CASE ZDETECTEDFACE.ZGLASSESTYPE WHEN 3 THEN 'None' WHEN 2 THEN 'Sun' WHEN 1 THEN 'Eye' ELSE ZDETECTEDFACE.ZGLASSESTYPE END AS 'GlassesType',
  CASE ZDETECTEDFACE.ZFACIALHAIRTYPE WHEN 1 THEN 'None' WHEN 2 THEN 'Beard/Mustache' WHEN 3 THEN 'Goatee' WHEN 5 THEN 'Stubble' ELSE ZDETECTEDFACE.ZFACIALHAIRTYPE END AS 'FacialHairType',
  CASE ZDETECTEDFACE.ZBALDTYPE WHEN 2 THEN 'Bald' WHEN 3 THEN 'NotBald' ELSE ZDETECTEDFACE.ZBALDTYPE END AS 'Baldness',
  ZGENERICASSET.ZORIGINALCOLORSPACE AS 'ColorSpace',
  ZGENERICASSET.Zduration AS 'Duration',
  ZGENERICASSET.Zvideocpdurationvalue AS 'VideoDuration',
  CASE ZGENERICASSET.ZCOMPLETE WHEN 1 THEN 'Yes' END AS 'Complete',
  CASE ZGENERICASSET.ZVISIBILITYSTATE WHEN 0 THEN 'Visible' WHEN 1 THEN 'Photo Streams Data' WHEN 2 THEN 'Burst' ELSE ZVISIBILITYSTATE END AS 'VisibilityState',
  CASE ZGENERICASSET.ZFAVORITE WHEN 0 THEN 'No' WHEN 1 THEN 'Yes' END AS 'Favorite',
  CASE ZGENERICASSET.zhidden WHEN 0 THEN 'Not_Hidden' WHEN 1 THEN 'File_Hidden' ELSE ZGENERICASSET.zhidden END AS 'Hidden_File',
  CASE ZGENERICASSET.ZTRASHEDSTATE WHEN 1 THEN 'In_Trash' WHEN 0 THEN 'Not_In_Trash' ELSE ZGENERICASSET.ZTRASHEDSTATE END AS 'TrashState',
  DateTime(ZGENERICASSET.ZTRASHEDDATE + 978307200, 'UNIXEPOCH') AS 'FileTrashDate',
  ZADDITIONALASSETATTRIBUTES.ZVIEWCOUNT AS 'ViewCount',
  ZADDITIONALASSETATTRIBUTES.ZPLAYCOUNT AS 'PlayCount',
  ZADDITIONALASSETATTRIBUTES.ZSHARECOUNT AS 'ShareCount',
  DateTime(ZGENERICASSET.ZLASTSHAREDDATE + 978307200, 'UNIXEPOCH') AS 'LastSharedDate',
  DateTime(ZGENERICASSET.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH') AS 'FileModificationDate',
  CASE ZGENERICASSET.ZHASADJUSTMENTS WHEN 0 THEN 'No' WHEN 1 THEN 'Yes' END AS 'Has_Adjustments',
  DateTime(ZGENERICASSET.ZADJUSTMENTTIMESTAMP + 978307200, 'UNIXEPOCH') AS 'AdjustmentsTimeStamp',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALFILESIZE AS 'OriginalFileSize',
  ZGENERICASSET.ZHEIGHT AS 'File_Height',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALHEIGHT AS 'OrgFileHeight',
  ZGENERICASSET.ZWIDTH AS 'File_Width',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALWIDTH AS 'OrgFileWidth',
  CASE ZGENERICASSET.ZORIENTATION WHEN 1 THEN 'Horizontal (left)' WHEN 3 THEN 'Horizontal (right)' WHEN 6 THEN 'Vertical (up)' WHEN 8 THEN 'Vertical (down)' ELSE ZORIENTATION END AS 'Orientation',
  CASE ZADDITIONALASSETATTRIBUTES.ZORIGINALORIENTATION WHEN 1 THEN 'Horizontal (left)' WHEN 3 THEN 'Horizontal (right)' WHEN 6 THEN 'Vertical (up)' WHEN 8 THEN 'Vertical (down)' ELSE ZORIENTATION END AS 'Org_Orientation',
  ZADDITIONALASSETATTRIBUTES.ZTIMEZONENAME AS 'TimeZoneName',
  ZADDITIONALASSETATTRIBUTES.ZTIMEZONEOFFSET AS 'TimeZoneOffset',
  ZADDITIONALASSETATTRIBUTES.ZINFERREDTIMEZONEOFFSET AS 'InferredTimeZoneOffset',
  ZGENERICASSET.ZLOCATIONDATA AS 'FileLocationData',
  CASE ZGENERICASSET.ZLATITUDE WHEN - 180.0 THEN '' ELSE ZGENERICASSET.ZLATITUDE END AS 'Latitude',
  CASE ZGENERICASSET.ZLONGITUDE WHEN - 180.0 THEN '' ELSE ZGENERICASSET.ZLONGITUDE END AS 'Longitude',
  CASE ZADDITIONALASSETATTRIBUTES.ZSHIFTEDLOCATIONISVALID WHEN 0 THEN 'No' WHEN 1 THEN 'Yes' END AS 'ShiftedLocationValid',
  CASE ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATAISVALID WHEN 0 THEN 'No_Check_SceneAnalysis' WHEN 1 THEN 'Yes_Check_SceneAnalysis' END AS 'ReverseLocationDataIsValid',
  ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATA AS 'OrgFileReverseLocationData',
  ZGENERICASSET.Zthumbnailindex AS 'ThumbnailIndex',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILWIDTH AS 'EmbeddedThumbnailWidth',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILHEIGHT AS 'EmbeddedThumbnailHeight',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILOFFSET AS 'EmbeddedThumbnailOffset',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILLENGTH AS 'EmbeddedThumbnailLenght',
  ZGENERICASSET.ZMOMENT AS 'MomentPK',
  DateTime(ZMOMENT.ZSTARTDATE + 978307200, 'UNIXEPOCH') AS 'MomentStartDate',
  DateTime(ZMOMENT.Zrepresentativedate + 978307200, 'UNIXEPOCH') AS 'MomentRepresentativeDate',
  DateTime(ZMOMENT.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH') AS 'MomentModificationDate',
  DateTime(ZMOMENT.ZENDDATE + 978307200, 'UNIXEPOCH') AS 'MomentEndDate',
  ZMOMENT.ZTITLE AS 'MomentTitle',
  CASE ZMOMENT.Zapproximatelatitude WHEN - 180.0 THEN '' ELSE ZMOMENT.Zapproximatelatitude END AS 'MomentApproxLatitude',
  CASE ZMOMENT.Zapproximatelongitude WHEN - 180.0 THEN '' ELSE ZMOMENT.Zapproximatelongitude END AS 'MomentApproxLongitude',
  ZGENERICASSET.ZUUID AS 'UUID',
  ZGENERICASSET.ZMEDIAGROUPUUID AS 'MediaGroupUUID',
  ZGENERICASSET.ZCLOUDASSETGUID AS 'CloudAssetGUID',
  ZADDITIONALASSETATTRIBUTES.ZPUBLICGLOBALUUID AS 'PublicGlobalUUID',
  ZADDITIONALASSETATTRIBUTES.ZMASTERFINGERPRINT AS 'MasterFingerprint',
  ZADDITIONALASSETATTRIBUTES.ZADJUSTEDFINGERPRINT AS 'AdjustedFingerprint'
FROM ZGENERICASSET
  JOIN Z_PRIMARYKEY ON ZGENERICASSET.z_ent = Z_PRIMARYKEY.z_ent
  LEFT JOIN ZMOMENT ON ZGENERICASSET.ZMOMENT = ZMOMENT.Z_PK
  JOIN ZADDITIONALASSETATTRIBUTES ON ZGENERICASSET.ZADDITIONALATTRIBUTES = ZADDITIONALASSETATTRIBUTES.Z_PK
  LEFT JOIN ZDETECTEDFACE ON ZADDITIONALASSETATTRIBUTES.ZASSET = ZDETECTEDFACE.ZASSET
  LEFT JOIN ZPERSON ON ZPERSON.Z_PK = ZDETECTEDFACE.ZPERSON
  LEFT JOIN Z_26ASSETS ON ZGENERICASSET.Z_PK = Z_26ASSETS.Z_34ASSETS
  LEFT JOIN ZGENERICALBUM ON ZGENERICALBUM.Z_PK = Z_26ASSETS.Z_26ALBUMS
"""

# iOS 14+ query — asset table renamed to ZASSET; moment columns reordered and
# MomentTrashState added; album link is Z_26ASSETS.Z_3ASSETS.
_PHOTOS_METADATA_QUERY_IOS14 = """
SELECT
  DateTime(ZASSET.ZDATECREATED + 978307200, 'UNIXEPOCH') AS 'DateCreated',
  ZASSET.Z_PK AS 'Asset_zpk',
  ZASSET.ZADDITIONALATTRIBUTES AS 'AddAttributes_Key',
  ZDETECTEDFACE.ZASSET AS 'DetectedFaceAsset',
  CASE ZASSET.ZKIND WHEN 0 THEN 'Photo' WHEN 1 THEN 'Video' END AS 'Kind',
  ZADDITIONALASSETATTRIBUTES.ZEXIFTIMESTAMPSTRING AS 'EXIFtimestamp',
  DateTime(ZADDITIONALASSETATTRIBUTES.ZSCENEANALYSISTIMESTAMP + 978307200, 'UNIXEPOCH') AS 'SceneAnalysisTimeStamp',
  DateTime(ZASSET.ZANALYSISSTATEMODIFICATIONDATE + 978307200, 'UNIXEPOCH') AS 'AnalysisStateModificationDate',
  DateTime(ZASSET.ZADDEDDATE + 978307200, 'UNIXEPOCH') AS 'AddDate',
  ZASSET.ZFILENAME AS 'FileName',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALFILENAME AS 'OriginalFilename',
  ZGENERICALBUM.ZTITLE AS 'AlbumTitle',
  ZADDITIONALASSETATTRIBUTES.ZCREATORBUNDLEID AS 'CreatorBundleID',
  ZADDITIONALASSETATTRIBUTES.ZEDITORBUNDLEID AS 'EditorBundleID',
  ZASSET.ZDIRECTORY AS 'Directory',
  ZASSET.ZUNIFORMTYPEIDENTIFIER AS 'UniformID',
  CASE ZASSET.ZSAVEDASSETTYPE WHEN 0 THEN 'Saved from other source' WHEN 2 THEN 'Photo Streams Data' WHEN 3 THEN 'Made/saved with this device' WHEN 4 THEN 'Default row' WHEN 7 THEN 'Deleted' ELSE ZASSET.ZSAVEDASSETTYPE END AS 'SavedAssetType',
  CASE WHEN ZASSET.ZFACEAREAPOINTS > 0 THEN 'Yes' ELSE 'NA' END AS 'FaceDetectedinPhoto',
  ZPERSON.ZDISPLAYNAME AS 'DisplayName',
  ZPERSON.ZFULLNAME AS 'FullName',
  ZPERSON.ZFACECOUNT AS 'FaceCount',
  ZDETECTEDFACE.ZPERSON AS 'Person',
  ZPERSON.ZCONTACTMATCHINGDICTIONARY AS 'ContactBlob',
  ZPERSON.ZPERSONUUID as 'PersonUUID',
  ZDETECTEDFACE.ZQUALITYMEASURE AS 'DetectedFaceQuality',
  CASE ZDETECTEDFACE.ZAGETYPE WHEN 1 THEN 'Baby/Toddler' WHEN 2 THEN 'Baby/Toddler' WHEN 3 THEN 'Child/YoungAdult' WHEN 4 THEN 'YoungAdult/Adult' WHEN 5 THEN 'Adult' ELSE ZDETECTEDFACE.ZAGETYPE END AS 'AgeTypeEstimate',
  CASE ZDETECTEDFACE.ZGENDERTYPE WHEN 1 THEN 'Male' WHEN 2 THEN 'Female' ELSE ZDETECTEDFACE.ZGENDERTYPE END AS 'Gender',
  CASE ZDETECTEDFACE.ZGLASSESTYPE WHEN 3 THEN 'None' WHEN 2 THEN 'Sun' WHEN 1 THEN 'Eye' ELSE ZDETECTEDFACE.ZGLASSESTYPE END AS 'GlassesType',
  CASE ZDETECTEDFACE.ZFACIALHAIRTYPE WHEN 1 THEN 'None' WHEN 2 THEN 'Beard/Mustache' WHEN 3 THEN 'Goatee' WHEN 5 THEN 'Stubble' ELSE ZDETECTEDFACE.ZFACIALHAIRTYPE END AS 'FacialHairType',
  CASE ZDETECTEDFACE.ZBALDTYPE WHEN 2 THEN 'Bald' WHEN 3 THEN 'NotBald' ELSE ZDETECTEDFACE.ZBALDTYPE END AS 'Baldness',
  ZASSET.ZORIGINALCOLORSPACE AS 'ColorSpace',
  ZASSET.Zduration AS 'Duration',
  ZASSET.Zvideocpdurationvalue AS 'VideoDuration',
  CASE ZASSET.ZCOMPLETE WHEN 1 THEN 'Yes' END AS 'Complete',
  CASE ZASSET.ZVISIBILITYSTATE WHEN 0 THEN 'Visible' WHEN 1 THEN 'Photo Streams Data' WHEN 2 THEN 'Burst' ELSE ZVISIBILITYSTATE END AS 'VisibilityState',
  CASE ZASSET.ZFAVORITE WHEN 0 THEN 'No' WHEN 1 THEN 'Yes' END AS 'Favorite',
  CASE ZASSET.zhidden WHEN 0 THEN 'Not_Hidden' WHEN 1 THEN 'File_Hidden' ELSE ZASSET.zhidden END AS 'Hidden_File',
  CASE ZASSET.ZTRASHEDSTATE WHEN 1 THEN 'In_Trash' WHEN 0 THEN 'Not_In_Trash' ELSE ZASSET.ZTRASHEDSTATE END AS 'TrashState',
  DateTime(ZASSET.ZTRASHEDDATE + 978307200, 'UNIXEPOCH') AS 'FileTrashDate',
  ZADDITIONALASSETATTRIBUTES.ZVIEWCOUNT AS 'ViewCount',
  ZADDITIONALASSETATTRIBUTES.ZPLAYCOUNT AS 'PlayCount',
  ZADDITIONALASSETATTRIBUTES.ZSHARECOUNT AS 'ShareCount',
  DateTime(ZASSET.ZLASTSHAREDDATE + 978307200, 'UNIXEPOCH') AS 'LastSharedDate',
  DateTime(ZASSET.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH') AS 'FileModificationDate',
  CASE ZASSET.ZHASADJUSTMENTS WHEN 0 THEN 'No' WHEN 1 THEN 'Yes' END AS 'Has_Adjustments',
  DateTime(ZASSET.ZADJUSTMENTTIMESTAMP + 978307200, 'UNIXEPOCH') AS 'AdjustmentsTimeStamp',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALFILESIZE AS 'OriginalFileSize',
  ZASSET.ZHEIGHT AS 'File_Height',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALHEIGHT AS 'OrgFileHeight',
  ZASSET.ZWIDTH AS 'File_Width',
  ZADDITIONALASSETATTRIBUTES.ZORIGINALWIDTH AS 'OrgFileWidth',
  CASE ZASSET.ZORIENTATION WHEN 1 THEN 'Horizontal (left)' WHEN 3 THEN 'Horizontal (right)' WHEN 6 THEN 'Vertical (up)' WHEN 8 THEN 'Vertical (down)' ELSE ZORIENTATION END AS 'Orientation',
  CASE ZADDITIONALASSETATTRIBUTES.ZORIGINALORIENTATION WHEN 1 THEN 'Horizontal (left)' WHEN 3 THEN 'Horizontal (right)' WHEN 6 THEN 'Vertical (up)' WHEN 8 THEN 'Vertical (down)' ELSE ZORIENTATION END AS 'Org_Orientation',
  ZADDITIONALASSETATTRIBUTES.ZTIMEZONENAME AS 'TimeZoneName',
  ZADDITIONALASSETATTRIBUTES.ZTIMEZONEOFFSET AS 'TimeZoneOffset',
  ZADDITIONALASSETATTRIBUTES.ZINFERREDTIMEZONEOFFSET AS 'InferredTimeZoneOffset',
  ZASSET.ZLOCATIONDATA AS 'FileLocationData',
  CASE ZASSET.ZLATITUDE WHEN - 180.0 THEN '' ELSE ZASSET.ZLATITUDE END AS 'Latitude',
  CASE ZASSET.ZLONGITUDE WHEN - 180.0 THEN '' ELSE ZASSET.ZLONGITUDE END AS 'Longitude',
  CASE ZADDITIONALASSETATTRIBUTES.ZSHIFTEDLOCATIONISVALID WHEN 0 THEN 'No' WHEN 1 THEN 'Yes' END AS 'ShiftedLocationValid',
  CASE ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATAISVALID WHEN 0 THEN 'No_Check_SceneAnalysis' WHEN 1 THEN 'Yes_Check_SceneAnalysis' END AS 'ReverseLocationDataIsValid',
  ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATA AS 'OrgFileReverseLocationData',
  ZASSET.Zthumbnailindex AS 'ThumbnailIndex',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILWIDTH AS 'EmbeddedThumbnailWidth',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILHEIGHT AS 'EmbeddedThumbnailHeight',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILOFFSET AS 'EmbeddedThumbnailOffset',
  ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILLENGTH AS 'EmbeddedThumbnailLenght',
  ZASSET.ZMOMENT AS 'MomentPK',
  ZMOMENT.ZTITLE AS 'MomentTitle',
  DateTime(ZMOMENT.ZSTARTDATE + 978307200, 'UNIXEPOCH') AS 'MomentStartDate',
  DateTime(ZMOMENT.Zrepresentativedate + 978307200, 'UNIXEPOCH') AS 'MomentRepresentativeDate',
  DateTime(ZMOMENT.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH') AS 'MomentModificationDate',
  DateTime(ZMOMENT.ZENDDATE + 978307200, 'UNIXEPOCH') AS 'MomentEndDate',
  CASE ZMOMENT.ZTRASHEDSTATE WHEN 1 THEN 'In_Trash' WHEN 0 THEN 'Not_In_Trash' ELSE ZMOMENT.ZTRASHEDSTATE END AS 'MomentTrashState',
  CASE ZMOMENT.Zapproximatelatitude WHEN - 180.0 THEN '' ELSE ZMOMENT.Zapproximatelatitude END AS 'MomentApproxLatitude',
  CASE ZMOMENT.Zapproximatelongitude WHEN - 180.0 THEN '' ELSE ZMOMENT.Zapproximatelongitude END AS 'MomentApproxLongitude',
  ZASSET.ZUUID AS 'UUID',
  ZASSET.ZMEDIAGROUPUUID AS 'MediaGroupUUID',
  ZASSET.ZCLOUDASSETGUID AS 'CloudAssetGUID',
  ZADDITIONALASSETATTRIBUTES.ZPUBLICGLOBALUUID AS 'PublicGlobalUUID',
  ZADDITIONALASSETATTRIBUTES.ZMASTERFINGERPRINT AS 'MasterFingerprint',
  ZADDITIONALASSETATTRIBUTES.ZADJUSTEDFINGERPRINT AS 'AdjustedFingerprint'
FROM ZASSET
  LEFT JOIN ZMOMENT ON ZASSET.ZMOMENT = ZMOMENT.Z_PK
  JOIN ZADDITIONALASSETATTRIBUTES ON ZASSET.ZADDITIONALATTRIBUTES = ZADDITIONALASSETATTRIBUTES.Z_PK
  LEFT JOIN ZDETECTEDFACE ON ZADDITIONALASSETATTRIBUTES.ZASSET = ZDETECTEDFACE.ZASSET
  LEFT JOIN ZPERSON ON ZPERSON.Z_PK = ZDETECTEDFACE.ZPERSON
  LEFT JOIN Z_26ASSETS ON ZASSET.Z_PK = Z_26ASSETS.Z_3ASSETS
  LEFT JOIN ZGENERICALBUM ON ZGENERICALBUM.Z_PK = Z_26ASSETS.Z_26ALBUMS
"""

# Report headers (column order mirrors the queries above plus the postal
# address columns inserted by _build_photos_data_list).
_PHOTOS_METADATA_HEADERS_IOS13 = (
    'Timestamp', 'Date Created', 'Postal Address', 'Postal Subadmin Area',
    'Postal Sublocality', 'Generic Asset ZPK', 'Add Attributes Key',
    'Detected Face Asset', 'Kind', 'EXIF Timestamp', 'Scene Analysis Timestamp',
    'Analysis State Modified Date', 'Add Date', 'Filename', 'Original Filename',
    'Album Title', 'Creator Bundle ID', 'Editor Bundle ID', 'Directory',
    'Uniform ID', 'Saved Asset Type', 'Face Detected in Photo', 'Display Name',
    'Full Name', 'Face Count', 'Person', 'Contact Blob', 'Person UUID',
    'Detected Face Quality', 'Age Type Estimate', 'Gender', 'Glasses Type',
    'Facial Hair Type', 'Baldness', 'Color Space', 'Duration', 'Video Duration',
    'Complete', 'Visibility State', 'Favorite', 'Hidden Fie?', 'Trash State',
    'File Trash Date', 'View Count', 'Play Count', 'Share Count',
    'Last Shared Date', 'File Modification Date', 'Has Adjustments?',
    'Adjustment Timestamp', 'Original File Size', 'File Height',
    'Org File Height', 'File Width', 'Org File Width', 'Orientation',
    'Org Orientation', 'Timezone Name', 'Timezone Offset',
    'Infered Timezone Offset', 'File Location Data', 'Latitude', 'Longitude',
    'Shifted Location Valid', 'Reverse Lcoation Data is Valid',
    'Org File Reverse Location Data', 'Thumbnail Index',
    'Embedded Thumbnail Width', 'Embedded Thumbnail Height',
    'Embedded Thumbnail Offset', 'Embedded Thumbnail Lenght', 'Moment PK',
    'Moment Start Date', 'Moment Representative Date',
    'Moment Modification Date', 'Moment End Date', 'Moment Title',
    'Moment Approx Latitude', 'Moment Approx Longitude', 'UUID',
    'Media Group UUID', 'Cloud Assest GUID', 'Public Global UUID',
    'Master Fingetprint', 'Adjusted Fingerprint')

_PHOTOS_METADATA_HEADERS_IOS14 = (
    'Timestamp', 'Date Created', 'Postal Address', 'Postal Subadmin Area',
    'Postal Sublocality', 'Asset ZPK', 'Add Attributes Key',
    'Detected Face Asset', 'Kind', 'EXIF Timestamp', 'Scene Analysis Timestamp',
    'Analysis State Modified Date', 'Add Date', 'Filename', 'Original Filename',
    'Album Title', 'Creator Bundle ID', 'Editor Bundle ID', 'Directory',
    'Uniform ID', 'Saved Asset Type', 'Face Detected in Photo', 'Display Name',
    'Full Name', 'Face Count', 'Person', 'Contact Blob', 'Person UUID',
    'Detected Face Quality', 'Age Type Estimate', 'Gender', 'Glasses Type',
    'Facial Hair Type', 'Baldness', 'Color Space', 'Duration', 'Video Duration',
    'Complete', 'Visibility State', 'Favorite', 'Hidden Fie?', 'Trash State',
    'File Trash Date', 'View Count', 'Play Count', 'Share Count',
    'Last Shared Date', 'File Modification Date', 'Has Adjustments?',
    'Adjustment Timestamp', 'Original File Size', 'File Height',
    'Org File Height', 'File Width', 'Org File Width', 'Orientation',
    'Org Orientation', 'Timezone Name', 'Timezone Offset',
    'Infered Timezone Offset', 'File Location Data', 'Latitude', 'Longitude',
    'Shifted Location Valid', 'Reverse Lcoation Data is Valid',
    'Org File Reverse Location Data', 'Thumbnail Index',
    'Embedded Thumbnail Width', 'Embedded Thumbnail Height',
    'Embedded Thumbnail Offset', 'Embedded Thumbnail Lenght', 'Moment PK',
    'Moment Title', 'Moment Start Date', 'Moment Representative Date',
    'Moment Modification Date', 'Moment End Date', 'Moment Trash State',
    'Moment Approx Latitude', 'Moment Approx Longitude', 'UUID',
    'Media Group UUID', 'Cloud Assest GUID', 'Public Global UUID',
    'Master Fingetprint', 'Adjusted Fingerprint')


def _extract_postal_address(blob, report_folder, counter, asset_key):
    """Export one ZREVERSELOCATIONDATA blob and deserialize its postal address.

    Returns (formatted_address, sub_administrative_area, sub_locality);
    empty strings on any failure. Errors are logged, never raised.
    """
    pathto = os.path.join(report_folder,
                          'ReverseLocationData' + str(counter) + '.bplist')
    # 'wb' (was 'ab'): re-running the artifact must overwrite, not append to,
    # a previously exported bplist.
    with open(pathto, 'wb') as wf:
        wf.write(blob)
    try:
        with open(pathto, 'rb') as f:
            deserialized_plist = nd.deserialize_plist(f)
        postal = deserialized_plist['postalAddress']
        return (postal['_formattedAddress'],
                postal['_subAdministrativeArea'],
                postal['_subLocality'])
    except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
            nd.biplist.InvalidPlistException, plistlib.InvalidFileException,
            nd.ccl_bplist.BplistError, KeyError, ValueError, TypeError,
            OSError, OverflowError):
        # str() guard: asset_key may be None — '+' with None used to raise
        # TypeError from inside the old bare-except handler.
        logfunc('Error reading exported bplist from Asset PK' + str(asset_key))
        return ('', '', '')


def _build_photos_data_list(all_rows, report_folder):
    """Turn query rows into report rows, prefixing each with the timestamp
    (duplicated for the timeline) and the decoded postal address fields.

    Works for both the iOS 13 (81-column) and iOS 14 (82-column) queries;
    column 61 is the reverse-location blob in both.
    """
    data_list = []
    for counter, row in enumerate(all_rows):
        postal_address = postal_address_subadminarea = postal_address_sublocality = ''
        if row[61] is not None:
            (postal_address,
             postal_address_subadminarea,
             postal_address_sublocality) = _extract_postal_address(
                row[61], report_folder, counter, row[0])
        data_list.append((row[0], row[0], postal_address,
                          postal_address_subadminarea,
                          postal_address_sublocality) + tuple(row[1:]))
    return data_list


def _write_photos_report(report_folder, data_headers, data_list, file_found):
    """Emit the HTML report plus TSV, timeline and KML outputs."""
    report = ArtifactHtmlReport('Photos.sqlite')
    report.start_artifact_report(report_folder, 'Metadata', '')
    report.add_script()
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()
    tsv(report_folder, data_headers, data_list, 'Photos-sqlite Metadata')
    timeline(report_folder, 'Photos-sqlite Metadata', data_list, data_headers)
    kmlgen(report_folder, 'Photos-sqlite Metadata', data_list, data_headers)


def get_photosMetadata(files_found, report_folder, seeker):
    """Report Photos.sqlite asset metadata for iOS 13.x and iOS 14+.

    iOS < 12 logs 'unsupported'; 12.x is silently skipped (no schema support),
    both matching previous behavior. The two version branches previously
    duplicated ~150 lines of identical row processing and report writing;
    that logic now lives in the shared helpers above.
    """
    iOSversion = scripts.artifacts.artGlobals.versionf
    parsed_version = version.parse(iOSversion)
    if parsed_version < version.parse("12"):
        logfunc("Unsupported version for Photos.sqlite metadata on iOS " + iOSversion)
        return
    if parsed_version >= version.parse("14"):
        query = _PHOTOS_METADATA_QUERY_IOS14
        data_headers = _PHOTOS_METADATA_HEADERS_IOS14
    elif parsed_version >= version.parse("13"):
        query = _PHOTOS_METADATA_QUERY_IOS13
        data_headers = _PHOTOS_METADATA_HEADERS_IOS13
    else:
        return  # iOS 12.x: no query defined for this schema (unchanged behavior)

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute(query)
    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        data_list = _build_photos_data_list(all_rows, report_folder)
        _write_photos_report(report_folder, data_headers, data_list, file_found)
    else:
        logfunc('No data available for Photos.sqlite metadata')
    db.close()
    return
def get_bumble(files_found, report_folder, seeker):
    """Parse Bumble chat messages and account details.

    Locates Chat.sqlite (messages) and yap-database.sqlite (account info)
    among files_found, decodes the bplist/NSKeyedArchiver blobs stored in
    each database's 'database2' table, and writes HTML/TSV/timeline reports.

    Fixes vs. original: the two empty-result log messages were missing the
    word "No", and the chat DB connection was never closed before the
    variable was rebound to the account DB (connection leak).
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('Chat.sqlite'):
            chat_db = file_found
        if file_found.endswith('yap-database.sqlite'):
            account_db = file_found

    # ---- Messages ----
    db = open_sqlite_db_readonly(chat_db)
    cursor = db.cursor()
    cursor.execute('''
    select database2.data,
    case secondaryIndex_isReadIndex.isIncoming
    when 0 then 'Outgoing'
    when 1 then 'Incoming'
    end as "Direction",
    case secondaryIndex_isReadIndex.isRead
    when 0 then ''
    when 1 then 'Yes'
    end as "Message Read"
    from database2
    join secondaryIndex_isReadIndex on database2.rowid = secondaryIndex_isReadIndex.rowid
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    bumble_datecreated = ''
    bumble_datemodified = ''
    bumble_message = ''
    bumble_receiver = ''
    bumble_sender = ''
    if usageentries > 0:
        for row in all_rows:
            plist_file_object = io.BytesIO(row[0])
            if row[0] is None:
                pass
            else:
                # Plain bplists load directly; NSKeyedArchiver blobs need
                # full deserialization via nska_deserialize.
                if row[0].find(b'NSKeyedArchiver') == -1:
                    if sys.version_info >= (3, 9):
                        plist = plistlib.load(plist_file_object)
                    else:
                        plist = biplist.readPlist(plist_file_object)
                else:
                    try:
                        plist = nd.deserialize_plist(plist_file_object)
                    except (nd.DeserializeError,
                            nd.biplist.NotBinaryPlistException,
                            nd.biplist.InvalidPlistException,
                            nd.plistlib.InvalidFileException,
                            nd.ccl_bplist.BplistError,
                            ValueError, TypeError, OSError,
                            OverflowError) as ex:
                        logfunc(
                            f'Failed to read plist for {row[0]}, error was:' + str(ex))

                if 'self.dateCreated' in plist:
                    bumble_datecreated = datetime.datetime.utcfromtimestamp(
                        plist['self.dateCreated'])
                    bumble_datemodified = datetime.datetime.utcfromtimestamp(
                        plist['self.dateModified'])
                    bumble_sender = plist.get('self.fromPersonUid', '')
                    bumble_receiver = plist.get('self.toPersonUid', '')
                    bumble_message = plist.get('self.messageText', '')

                    data_list.append(
                        (bumble_datecreated, bumble_datemodified, bumble_sender,
                         bumble_receiver, bumble_message, row[1], row[2]))
                else:
                    pass

        description = 'Bumble - Messages'
        report = ArtifactHtmlReport('Bumble - Messages')
        report.start_artifact_report(report_folder, 'Bumble - Messages')
        report.add_script()
        data_headers = (
            'Created Timestamp', 'Modified Timestamp', 'Sender ID',
            'Receiver ID', 'Message', 'Message Direction', 'Message Read')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Bumble - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Bumble - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Bumble - Messages data available')

    db.close()  # close chat DB before opening the account DB (was leaked)

    # ---- Account details ----
    db = open_sqlite_db_readonly(account_db)
    cursor = db.cursor()
    cursor.execute('''
    select data, key
    from database2
    where key = 'lastLocation' or key = 'appVersion' or key = 'userName' or key = 'userId'
    ''')

    all_rows1 = cursor.fetchall()
    usageentries = len(all_rows1)
    data_list_account = []
    if usageentries > 0:
        for row in all_rows1:
            plist_file_object = io.BytesIO(row[0])
            if row[0] is None:
                pass
            else:
                if row[0].find(b'NSKeyedArchiver') == -1:
                    if sys.version_info >= (3, 9):
                        plist = plistlib.load(plist_file_object)
                    else:
                        plist = biplist.readPlist(plist_file_object)
                else:
                    try:
                        plist = nd.deserialize_plist(plist_file_object)
                    except (nd.DeserializeError,
                            nd.biplist.NotBinaryPlistException,
                            nd.biplist.InvalidPlistException,
                            nd.plistlib.InvalidFileException,
                            nd.ccl_bplist.BplistError,
                            ValueError, TypeError, OSError,
                            OverflowError) as ex:
                        logfunc(
                            f'Failed to read plist for {row[0]}, error was:' + str(ex))

                if row[1] == 'userId':
                    bumble_userId = plist.get('root', '')
                    data_list_account.append(('User ID', str(bumble_userId)))
                elif row[1] == 'userName':
                    bumble_userName = plist.get('root', '')
                    data_list_account.append(('User Name', str(bumble_userName)))
                elif row[1] == 'lastLocation':
                    # Core Location timestamps are seconds since 2001-01-01
                    # (Apple epoch); +978307200 converts to Unix epoch.
                    bumble_timestamp = datetime.datetime.utcfromtimestamp(
                        int(plist['kCLLocationCodingKeyTimestamp']) + 978307200)
                    bumble_lastlat = plist.get(
                        'kCLLocationCodingKeyRawCoordinateLatitude', '')
                    bumble_lastlong = plist.get(
                        'kCLLocationCodingKeyRawCoordinateLongitude', '')
                    data_list_account.append(('Timestamp', str(bumble_timestamp)))
                    data_list_account.append(('Last Latitude', bumble_lastlat))
                    data_list_account.append(('Last Longitude', bumble_lastlong))
                elif row[1] == 'appVersion':
                    bumble_appVersion = plist.get('root', '')
                    data_list_account.append(
                        ('App Version', str(bumble_appVersion)))
                else:
                    pass

        description = 'Bumble - Account Details'
        report = ArtifactHtmlReport('Bumble - Account Details')
        report.start_artifact_report(report_folder, 'Bumble - Account Details')
        report.add_script()
        data_headers_account = ('Key', 'Values')
        report.write_artifact_data_table(data_headers_account,
                                         data_list_account, file_found)
        report.end_artifact_report()

        tsvname = f'Bumble - Account Details'
        tsv(report_folder, data_headers_account, data_list_account, tsvname)

        tlactivity = f'Bumble - Account Details'
        timeline(report_folder, tlactivity, data_list_account,
                 data_headers_account)
    else:
        logfunc('No Bumble - Account Details data available')

    db.close()
def get_appItunesmeta(files_found, report_folder, seeker):
    """Report iTunes Store purchase/download metadata for installed apps.

    For every iTunesMetadata.plist in files_found, reads store metadata
    (purchase date, bundle id, purchasing Apple ID, etc.) and, when a
    sibling BundleMetadata.plist exists, the NSKeyedArchiver-encoded
    install date. Results go to HTML/TSV/timeline reports.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('iTunesMetadata.plist'):
            # iTunesMetadata.plist is a plain binary plist.
            plist = biplist.readPlist(file_found)
            # NOTE(review): all keys below are accessed unconditionally —
            # presumably always present; a missing key would raise KeyError
            # and abort the whole artifact. Confirm against sample data.
            purchasedate = (
                plist['com.apple.iTunesStore.downloadInfo']['purchaseDate'])
            bundleid = (plist['softwareVersionBundleId'])
            itemname = (plist['itemName'])
            artistname = (plist['artistName'])
            versionnum = (plist['bundleShortVersionString'])
            # Apple ID of the account that downloaded the app.
            downloadedby = (plist['com.apple.iTunesStore.downloadInfo']
                            ['accountInfo']['AppleID'])
            genre = (plist['genre'])
            factoryinstall = (plist['isFactoryInstall'])
            appreleasedate = (plist['releaseDate'])
            sourceapp = (plist['sourceApp'])
            sideloaded = (plist['sideLoadedDeviceBasedVPP'])
            variantid = (plist['variantID'])

            # BundleMetadata.plist lives next to iTunesMetadata.plist and is
            # NSKeyedArchiver-encoded, hence nd.deserialize_plist.
            p = pathlib.Path(file_found)
            parent = p.parent
            itunes_metadata_path = (os.path.join(parent, "BundleMetadata.plist"))
            if os.path.exists(itunes_metadata_path):
                with open(itunes_metadata_path, 'rb') as f:
                    deserialized_plist = nd.deserialize_plist(f)
                    install_date = (deserialized_plist['installDate'])
            else:
                install_date = ''

            data_list.append(
                (install_date, purchasedate, bundleid, itemname, artistname,
                 versionnum, downloadedby, genre, factoryinstall,
                 appreleasedate, sourceapp, sideloaded, variantid, parent))

    if len(data_list) > 0:
        # One row per app; the per-row source path is the last column.
        fileloc = 'See source file location column'
        description = 'iTunes & Bundle ID Metadata contents for apps'
        report = ArtifactHtmlReport('Apps - Itunes & Bundle Metadata')
        report.start_artifact_report(report_folder, 'Apps - Itunes Metadata',
                                     description)
        report.add_script()
        data_headers = ('Installed Date', 'App Purchase Date', 'Bundle ID',
                        'Item Name', 'Artist Name', 'Version Number',
                        'Downloaded by', 'Genre', 'Factory Install',
                        'App Release Date', 'Source App', 'Sideloaded?',
                        'Variant ID', 'Source File Location')
        report.write_artifact_data_table(data_headers, data_list, fileloc)
        report.end_artifact_report()

        tsvname = 'Apps - Itunes Bundle Metadata'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Apps - Itunes Bundle Metadata'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data on Apps - Itunes Bundle Metadata')
def get_applicationstate(files_found, report_folder, seeker):
    """Report installed-app bundle/sandbox paths from applicationState.db.

    Pulls each app's 'compatibilityInfo' blob (an NSKeyedArchiver plist) and,
    via a correlated subquery, its 'XBApplicationSnapshotManifest' value,
    then reports bundle id, bundle path and sandbox path per app.
    """
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    # One row per app with key 'compatibilityInfo'; the correlated subquery
    # fetches the snapshot manifest for the same application id, if any.
    cursor.execute('''
    select ait.application_identifier as ai, kvs.value as compat_info,
    (SELECT kvs.value from kvs
    left join application_identifier_tab on application_identifier_tab.id = kvs.application_identifier
    left join key_tab on kvs.key = key_tab.id
    WHERE key_tab.key='XBApplicationSnapshotManifest' and kvs.key = key_tab.id
    and application_identifier_tab.id = ait.id
    ) as snap_info
    from kvs
    left join application_identifier_tab ait on ait.id = kvs.application_identifier
    left join key_tab on kvs.key = key_tab.id
    where key_tab.key='compatibilityInfo'
    order by ait.id
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        # NOTE(review): snap_info_list is populated below but never reported
        # or returned here — possibly groundwork for a future artifact.
        snap_info_list = []
        for row in all_rows:
            # NOTE(review): bundleid is assigned but unused; row[0] is used
            # directly in the log messages instead.
            bundleid = str(row[0])
            plist_file_object = io.BytesIO(row[1])
            try:
                plist = nd.deserialize_plist(plist_file_object)
                if type(plist) is dict:
                    var1 = plist.get('bundleIdentifier', '')
                    var2 = plist.get('bundlePath', '')
                    var3 = plist.get('sandboxPath', '')
                    data_list.append((var1, var2, var3))
                    if row[2]:
                        snap_info_list.append((var1, var2, var3, row[2]))
                else:
                    logfunc(f'For {row[0]} Unexpected type "' +
                            str(type(plist)) +
                            '" found as plist root, can\'t process')
            except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
                    nd.biplist.InvalidPlistException,
                    plistlib.InvalidFileException, nd.ccl_bplist.BplistError,
                    ValueError, TypeError, OSError, OverflowError) as ex:
                logfunc(f'Failed to read plist for {row[0]}, error was:' +
                        str(ex))

        report = ArtifactHtmlReport('Application State')
        report.start_artifact_report(report_folder, 'Application State DB')
        report.add_script()
        data_headers = ('Bundle ID', 'Bundle Path', 'Sandbox Path')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Application State'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Application State data available')

    db.close()
    return
def get_cloudkitNoteSharing(files_found, report_folder, seeker):
    """Report Notes shared via CloudKit from NoteStore.sqlite.

    Each ZSERVERRECORDDATA blob (an NSKeyedArchiver plist) is exported to
    the report folder and decoded to extract creator/modifier identities,
    device and timestamps for the shared record.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('NoteStore.sqlite'):
            db = sqlite3.connect(file_found)
            cursor = db.cursor()
            cursor.execute('''
            SELECT Z_PK, ZSERVERRECORDDATA
            FROM ZICCLOUDSYNCINGOBJECT
            WHERE ZSERVERRECORDDATA NOT NULL
            ''')
            note_data = []
            all_rows = cursor.fetchall()
            result_number = len(all_rows)
            if result_number > 0:
                for row in all_rows:
                    # Export the raw blob alongside the report for manual review.
                    filename = os.path.join(
                        report_folder,
                        'zserverrecorddata_' + str(row[0]) + '.bplist')
                    output_file = open(filename, "wb")
                    output_file.write(row[1])
                    output_file.close()

                    deserialized_plist = nd.deserialize_plist(
                        io.BytesIO(row[1]))
                    creator_id = ''
                    last_modified_id = ''
                    creation_date = ''
                    last_modified_date = ''
                    last_modified_device = ''
                    record_type = ''
                    record_id = ''
                    # The deserialized root iterates as a sequence of dicts,
                    # each carrying one CloudKit record attribute.
                    for item in deserialized_plist:
                        if 'RecordCtime' in item:
                            creation_date = item['RecordCtime']
                        elif 'RecordMtime' in item:
                            last_modified_date = item['RecordMtime']
                        elif 'LastModifiedUserRecordID' in item:
                            last_modified_id = item[
                                'LastModifiedUserRecordID']['RecordName']
                        elif 'CreatorUserRecordID' in item:
                            creator_id = item['CreatorUserRecordID'][
                                'RecordName']
                        elif 'ModifiedByDevice' in item:
                            last_modified_device = item['ModifiedByDevice']
                        elif 'RecordType' in item:
                            record_type = item['RecordType']
                        elif 'RecordID' in item:
                            record_id = item['RecordID']['RecordName']

                    note_data.append([
                        record_id, record_type, creation_date, creator_id,
                        last_modified_date, last_modified_id,
                        last_modified_device
                    ])

                # NOTE(review): 'ZICCLOUDSYYNCINGOBJECT' below looks like a
                # typo for ZICCLOUDSYNCINGOBJECT; left as-is (runtime string).
                description = 'CloudKit Note Sharing - Notes information shared via CloudKit. Look up the Record ID in the ZICCLOUDSYYNCINGOBJECT.ZIDENTIFIER column. '
                report = ArtifactHtmlReport('Note Sharing')
                report.start_artifact_report(report_folder, 'Note Sharing',
                                             description)
                report.add_script()
                note_headers = ('Record ID', 'Record Type', 'Creation Date',
                                'Creator ID', 'Modified Date', 'Modifier ID',
                                'Modifier Device')
                report.write_artifact_data_table(note_headers, note_data,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Cloudkit Note Sharing'
                tsv(report_folder, note_headers, note_data, tsvname)
            else:
                logfunc('No Cloudkit - Cloudkit Note Sharing data available')

            db.close()
def get_instagramThreads(files_found, report_folder, seeker):
    """Parse Instagram Direct threads: messages, reactions, shared media
    and video-chat events.

    First pass builds a user-pk -> full-name map from the archived thread
    metadata; second pass decodes every message's archived plist for
    content, reaction, shared-media and VOIP details, writing HTML/TSV/
    timeline reports (plus a separate report for video calls).

    Fixes vs. original: removed a redundant outer loop that re-built the
    user map len(users) times per thread (O(n^2) with unused loop vars),
    and dropped the unused local 'fila'.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.db'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select metadata from threads
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    userdict = {}
    data_list = []
    video_calls = []
    if usageentries > 0:
        for row in all_rows:
            plist = ''
            plist_file_object = io.BytesIO(row[0])
            if row[0].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError,
                        nd.biplist.NotBinaryPlistException,
                        nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException,
                        nd.ccl_bplist.BplistError,
                        ValueError, TypeError, OSError,
                        OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[0]}, error was:' +
                            str(ex))

            # Map every participant's pk to their full name (single pass).
            for user in plist['NSArray<IGUser *>*users']:
                userdict[user['pk']] = user['fullName']

            inviterPk = plist['IGUser*inviter']['pk']
            inviterFull = plist['IGUser*inviter']['fullName']
            userdict[inviterPk] = inviterFull

    cursor.execute('''
    select messages.message_id,
    messages.thread_id,
    messages.archive,
    threads.metadata,
    threads.thread_messages_range,
    threads.visual_message_info
    from messages, threads
    where messages.thread_id = threads.thread_id
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        for row in all_rows:
            plist = ''
            senderpk = ''
            serverTimestamp = ''
            message = ''
            videoChatTitle = ''
            videoChatCallID = ''
            dmreaction = ''
            reactionServerTimestamp = ''
            reactionUserID = ''
            sharedMediaID = ''
            sharedMediaURL = ''

            plist_file_object = io.BytesIO(row[2])
            if row[2].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError,
                        nd.biplist.NotBinaryPlistException,
                        nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException,
                        nd.ccl_bplist.BplistError,
                        ValueError, TypeError, OSError,
                        OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[2]}, error was:' +
                            str(ex))

            # Messages
            senderpk = plist['IGDirectPublishedMessageMetadata*metadata'][
                'NSString*senderPk']
            serverTimestamp = plist[
                'IGDirectPublishedMessageMetadata*metadata'][
                'NSDate*serverTimestamp']
            message = plist['IGDirectPublishedMessageContent*content'].get(
                'NSString*string')

            # VOIP calls
            if plist['IGDirectPublishedMessageContent*content'].get(
                    'IGDirectThreadActivityAnnouncement*threadActivity'
            ) is not None:
                videoChatTitle = plist[
                    'IGDirectPublishedMessageContent*content'][
                    'IGDirectThreadActivityAnnouncement*threadActivity'][
                    'NSString*voipTitle']
                videoChatCallID = plist[
                    'IGDirectPublishedMessageContent*content'][
                    'IGDirectThreadActivityAnnouncement*threadActivity'][
                    'NSString*videoCallId']

            # Reactions (only the first reaction is reported)
            reactions = (plist['NSArray<IGDirectMessageReaction *>*reactions'])
            if reactions:
                dmreaction = reactions[0].get('emojiUnicode')
                reactionServerTimestamp = reactions[0].get('serverTimestamp')
                reactionUserID = reactions[0].get('userId')

            # Shared media
            if (plist['IGDirectPublishedMessageContent*content'].get(
                    'IGDirectPublishedMessageMedia*media')):
                sharedMediaID = plist[
                    'IGDirectPublishedMessageContent*content'][
                    'IGDirectPublishedMessageMedia*media'][
                    'IGDirectPublishedMessagePermanentMedia*permanentMedia'][
                    'IGPhoto*photo']['kIGPhotoMediaID']
                sharedMediaURL = plist[
                    'IGDirectPublishedMessageContent*content'][
                    'IGDirectPublishedMessageMedia*media'][
                    'IGDirectPublishedMessagePermanentMedia*permanentMedia'][
                    'IGPhoto*photo']['imageVersions'][0]['url']['NS.relative']

            # Resolve sender's display name from the first-pass map.
            if senderpk in userdict:
                user = userdict[senderpk]
            else:
                user = ''

            data_list.append(
                (serverTimestamp, senderpk, user, message, videoChatTitle,
                 videoChatCallID, dmreaction, reactionServerTimestamp,
                 reactionUserID, sharedMediaID, sharedMediaURL))
            if videoChatTitle:
                video_calls.append((serverTimestamp, senderpk, user,
                                    videoChatTitle, videoChatCallID))

        description = 'Instagram Threads'
        report = ArtifactHtmlReport('Instagram Threads')
        report.start_artifact_report(report_folder, 'Instagram Threads',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Sender ID', 'Username', 'Message',
                        'Video Chat Title', 'Video Chat ID', 'DM Reaction',
                        'DM Reaction Server Timestamp', 'Reaction User ID',
                        'Shared Media ID', 'Shared Media URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Instagram Threads'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Instagram Threads'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Instagram Threads data available')

    if len(video_calls) > 0:
        description = 'Instagram Threads Calls'
        report = ArtifactHtmlReport('Instagram Threads Calls')
        report.start_artifact_report(report_folder, 'Instagram Threads Calls',
                                     description)
        report.add_script()
        data_headersv = ('Timestamp', 'Sender ID', 'Username',
                         'Video Chat Title', 'Video Chat ID')
        report.write_artifact_data_table(data_headersv, video_calls,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Instagram Threads Calls'
        tsv(report_folder, data_headersv, video_calls, tsvname)

        tlactivity = 'Instagram Threads Calls'
        timeline(report_folder, tlactivity, video_calls, data_headersv)
    else:
        logfunc('No Instagram Threads Video Calls data available')

    db.close()
def get_notificationsXII(files_found, report_folder, seeker):
    """Parse delivered notifications for iOS 12+.

    Walks the notifications directory tree, deserializes every
    DeliveredNotifications.plist (NSKeyedArchiver) and extracts creation
    date, bundle, title/subtitle, message and any remaining attributes.
    """
    bundle_info = get_bundle_info(files_found)
    data_list = []
    exportedbplistcount = 0
    pathfound = str(files_found[0])
    # logfunc(f'Posix to string is: {pathfound}')
    for filepath in glob.iglob(pathfound + "/**", recursive=True):
        # create directory where script is running from
        if os.path.isfile(filepath):  # filter dirs
            file_name = os.path.splitext(os.path.basename(filepath))[0]
            # create directory
            if filepath.endswith('DeliveredNotifications.plist'):
                # Parent directory name is the owning app's bundle id.
                bundle_id = os.path.basename(os.path.dirname(filepath))
                # open the plist
                p = open(filepath, "rb")
                plist = nd.deserialize_plist(p)

                # Empty plist will be { 'root': None }
                if isinstance(plist, dict):
                    continue  # skip it, it's empty
                # Good plist will be a list of dicts
                for item in plist:
                    creation_date = ''
                    title = ''
                    subtitle = ''
                    message = ''
                    other_dict = {}
                    # Resolve bundle id to a friendly name when known.
                    bundle_name = bundle_info.get(bundle_id, bundle_id)
                    #if bundle_name == 'com.apple.ScreenTimeNotifications':
                    #    pass # has embedded plist!
                    for k, v in item.items():
                        if k == 'AppNotificationCreationDate':
                            creation_date = str(v)
                        elif k == 'AppNotificationMessage':
                            message = v
                        elif k == 'AppNotificationTitle':
                            title = v
                        elif k == 'AppNotificationSubtitle':
                            subtitle = v
                        else:
                            # Unrecognized attributes are collected verbatim
                            # into the "Other Details" column.
                            if isinstance(v, bytes):
                                logfunc(f'Found binary data, look into this one later k={k}!')
                            elif isinstance(v, dict):
                                pass # recurse look for plists #TODO
                            elif isinstance(v, list):
                                pass # recurse look for plists #TODO
                            other_dict[k] = str(v)
                    if subtitle:
                        title += f'[{subtitle}]'
                    data_list.append((creation_date, bundle_name, title,
                                      message, str(other_dict)))
                p.close()
            elif "AttachmentsList" in file_name:
                pass  # future development

    # NOTE(review): the report below is written even when data_list is empty,
    # and 'filepath' here is whatever the glob loop last yielded — presumably
    # intentional as a representative source path; confirm.
    description = 'iOS > 12 Notifications'
    report = ArtifactHtmlReport('iOS Notificatons')
    report.start_artifact_report(report_folder, 'iOS Notifications',
                                 description)
    report.add_script()
    data_headers = ('Creation Time', 'Bundle', 'Title[Subtitle]', 'Message',
                    'Other Details')
    report.write_artifact_data_table(data_headers, data_list, filepath)
    report.end_artifact_report()

    logfunc("Total notifications processed:" + str(len(data_list)))
    #logfunc("Total exported bplists from notifications:" + str(exportedbplistcount))
    tsvname = 'Notifications'
    tsv(report_folder, data_headers, data_list, tsvname)

    tlactivity = 'Notifications'
    timeline(report_folder, tlactivity, data_list, data_headers)

    if len(data_list) == 0:
        logfunc("No notifications found.")
def get_imoHD_Chat(files_found, report_folder, seeker):
    """Parse IMO HD chat messages and contacts.

    Joins ZIMOCHATMSG with ZIMOCONTACT for sender details, decodes each
    message's ZIMDATA plist for the action type and any attachment, copies
    matched attachment files into the report folder for thumbnails, and
    writes HTML/TSV/timeline reports for messages and contacts.

    Fixes vs. original: the deserialize-failure log referenced row[0] (the
    timestamp) instead of the failing blob row[6], and both empty-result
    log messages were missing the word "No".
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.sqlite'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    case ZIMOCHATMSG.ZTS
    when 0 then ''
    else datetime(ZTS/1000000000,'unixepoch')
    end as "Timestamp",
    ZIMOCONTACT.ZDISPLAY as "Sender Display Name",
    ZIMOCHATMSG.ZALIAS as "Sender Alias",
    ZIMOCONTACT.ZDIGIT_PHONE,
    ZIMOCHATMSG.ZTEXT as "Message",
    case ZIMOCHATMSG.ZISSENT
    when 0 then 'Received'
    when 1 then 'Sent'
    end as "Message Status",
    ZIMOCHATMSG.ZIMDATA
    from ZIMOCHATMSG
    left join ZIMOCONTACT ON ZIMOCONTACT.ZBUID = ZIMOCHATMSG.ZA_UID
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            plist = ''
            timestamp = row[0]
            senderName = row[1]
            senderAlias = row[2]
            senderPhone = row[3]
            message = row[4]
            messageStatus = row[5]
            itemAction = ''
            attachmentURL = ''
            thumb = ''

            plist_file_object = io.BytesIO(row[6])
            if row[6] is None:
                pass
            else:
                if row[6].find(b'NSKeyedArchiver') == -1:
                    if sys.version_info >= (3, 9):
                        plist = plistlib.load(plist_file_object)
                    else:
                        plist = biplist.readPlist(plist_file_object)
                else:
                    try:
                        plist = nd.deserialize_plist(plist_file_object)
                    except (nd.DeserializeError,
                            nd.biplist.NotBinaryPlistException,
                            nd.biplist.InvalidPlistException,
                            nd.plistlib.InvalidFileException,
                            nd.ccl_bplist.BplistError,
                            ValueError, TypeError, OSError,
                            OverflowError) as ex:
                        # Fixed: log the failing blob (row[6]), not row[0].
                        logfunc(
                            f'Failed to read plist for {row[6]}, error was:' + str(ex))

                itemAction = plist['type']

                # Check for Attachments
                if plist.get('objects') is not None:
                    attachmentName = plist['objects'][0]['object_id']
                    attachmentURL = "https://cdn.imoim.us/s/object/" + attachmentName + "/"
                    # Copy the matching local media file so the report can
                    # render a thumbnail.
                    for match in files_found:
                        if attachmentName in match:
                            shutil.copy2(match, report_folder)
                            data_file_name = os.path.basename(match)
                            thumb = f'<img src="{report_folder}/{data_file_name}" width="300"></img>'
                else:
                    attachmentURL = ''

            data_list.append(
                (timestamp, senderName, senderAlias, senderPhone, message,
                 messageStatus, itemAction, attachmentURL, thumb))

        description = 'IMO HD Chat - Messages'
        report = ArtifactHtmlReport('IMO HD Chat - Messages')
        report.start_artifact_report(report_folder, 'IMO HD Chat - Messages')
        report.add_script()
        data_headers = (
            'Timestamp', 'Sender Name', 'Sender Alias', 'Sender Phone',
            'Message', 'Message Status', 'Item Action', 'Attachment URL',
            'Attachment')
        report.write_artifact_data_table(data_headers, data_list, file_found,
                                         html_no_escape=['Attachment'])
        report.end_artifact_report()

        tsvname = f'IMO HD Chat - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'IMO HD Chat - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No IMO HD Chat - Messages data available')

    cursor.execute('''
    select
    ZPH_NAME,
    ZALIAS,
    ZPHONE,
    "https://cdn.imoim.us/s/object/" || ZICON_ID || "/" as "Profile Pic",
    ZBUID
    from ZIMOCONTACT
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        description = 'IMO HD Chat - Contacts'
        report = ArtifactHtmlReport('IMO HD Chat - Contacts')
        report.start_artifact_report(report_folder, 'IMO HD Chat - Contacts')
        report.add_script()
        data_headers = (
            'Contact Name', 'Contact Alias', 'Contact Phone',
            'Profile Pic URL', 'User ID')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'IMO HD Chat - Contacts'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'IMO HD Chat - Contacts'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No IMO HD Chat - Contacts data available')

    db.close()
def get_teams(files_found, report_folder, seeker):
    """Parse Microsoft Teams messages, contacts, users, call logs and
    shared-location cards.

    The optional 'CacheFile' (an NSKeyedArchiver plist) maps embedded image
    URLs to local cache paths so message thumbnails can be copied into the
    report folder.

    Fixes vs. original: data_list_unparsed.append() was called with five
    positional arguments (TypeError at runtime) — now appends a tuple; and
    nsplist is initialized to {} so an image message no longer raises
    NameError when no CacheFile was found.
    """
    CacheFile = 0
    nsplist = {}  # fixed: safe default when no CacheFile is present
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.sqlite'):
            databasedata = file_found
        if file_found.endswith('CacheFile'):
            CacheFile = file_found

    if CacheFile != 0:
        with open(CacheFile, 'rb') as nsfile:
            nsplist = nd.deserialize_plist(nsfile)

    db = open_sqlite_db_readonly(databasedata)
    cursor = db.cursor()
    # ZARRIVALTIME is seconds since the Apple epoch (2001-01-01).
    cursor.execute('''
    SELECT datetime('2001-01-01', "ZARRIVALTIME" || ' seconds'),
    ZIMDISPLAYNAME,
    ZCONTENT
    from ZSMESSAGE
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            thumb = ''
            if '<div><img src=' in row[2]:
                # Pull the quoted image URL out of the HTML content and map
                # it to a cached local file via the CacheFile plist.
                matches = re.search('"([^"]+)"', row[2])
                imageURL = (matches[0].strip('\"'))
                if imageURL in nsplist.keys():
                    data_file_real_path = nsplist[imageURL]
                    for match in files_found:
                        if data_file_real_path in match:
                            shutil.copy2(match, report_folder)
                            data_file_name = os.path.basename(match)
                            thumb = f'<img src="{report_folder}/{data_file_name}"></img>'
            data_list.append((row[0], row[1], row[2], thumb))

        description = 'Teams Messages'
        report = ArtifactHtmlReport('Teams Messages')
        report.start_artifact_report(report_folder, 'Teams Messages',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Name', 'Message', 'Shared Media')
        report.write_artifact_data_table(data_headers, data_list, file_found,
                                         html_no_escape=['Shared Media'])
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Microsoft Teams Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Microsoft Teams Messages data available')

    cursor.execute('''
    SELECT ZDISPLAYNAME,
    zemail,
    ZPHONENUMBER
    from ZDEVICECONTACTHASH
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = 'Teams Contact'
        report = ArtifactHtmlReport('Teams Contact')
        report.start_artifact_report(report_folder, 'Teams Contact',
                                     description)
        report.add_script()
        data_headers = ('Display Name', 'Email', 'Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Teams Contact'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Teams Contact data available')

    cursor.execute('''
    SELECT datetime('2001-01-01', "ZTS_LASTSYNCEDAT" || ' seconds'),
    ZDISPLAYNAME,
    ZTELEPHONENUMBER
    from zuser
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = 'Teams User'
        report = ArtifactHtmlReport('Teams User')
        report.start_artifact_report(report_folder, 'Teams User', description)
        report.add_script()
        data_headers = ('Timestamp Last Sync', 'Display Name', 'Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams User'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Microsoft Teams User'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Teams User data available')

    cursor.execute('''
    SELECT ZCOMPOSETIME,
    zfrom,
    ZIMDISPLAYNAME,
    zcontent,
    ZPROPERTIES
    from ZSMESSAGE, ZMESSAGEPROPERTIES
    where ZSMESSAGE.ZTSID = ZMESSAGEPROPERTIES.ZTSID
    order by ZCOMPOSETIME
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list_calls = []
    data_list_cards = []
    # NOTE: data_list_unparsed is collected but not reported — kept for
    # potential future use / debugging.
    data_list_unparsed = []
    if usageentries > 0:
        for row in all_rows:
            plist = ''
            composetime = row[0].replace('T', ' ')
            plist_file_object = io.BytesIO(row[4])
            if row[4].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError,
                        nd.biplist.NotBinaryPlistException,
                        nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException,
                        nd.ccl_bplist.BplistError,
                        ValueError, TypeError, OSError,
                        OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[4]}, error was:' +
                            str(ex))

            if 'call-log' in plist:
                # Call metadata is a JSON document inside the plist.
                datacalls = json.loads(plist['call-log'])
                callstart = (datacalls.get('startTime'))
                callstart = callstart.replace('T', ' ')
                callconnect = (datacalls.get('connectTime'))
                callconnect = callconnect.replace('T', ' ')
                callend = (datacalls['endTime'])
                callend = callend.replace('T', ' ')
                calldirection = (datacalls['callDirection'])
                calltype = (datacalls['callType'])
                callstate = (datacalls['callState'])
                calloriginator = (datacalls['originator'])
                calltarget = (datacalls['target'])
                calloriginatordname = (
                    datacalls['originatorParticipant']['displayName'])
                callparticipantdname = (
                    datacalls['targetParticipant']['displayName'])
                data_list_calls.append(
                    (composetime, row[1], row[2], row[3], callstart,
                     callconnect, callend, calldirection, calltype, callstate,
                     calloriginator, calltarget, calloriginatordname,
                     callparticipantdname))
            elif 'cards' in plist:
                # Shared-location (and similar) adaptive cards, JSON-encoded.
                cards = json.loads(plist['cards'])
                cardurl = (
                    cards[0]['content']['body'][0]['selectAction']['url'])
                cardtitle = (
                    cards[0]['content']['body'][0]['selectAction']['title'])
                cardtext = (cards[0]['content']['body'][1]['text'])
                cardurl2 = (cards[0]['content']['body'][0]['url'])
                if (cards[0]['content']['body'][0].get('id')) is not None:
                    idcontent = json.loads(
                        cards[0]['content']['body'][0]['id'])
                    cardlat = (idcontent.get('latitude'))
                    cardlong = (idcontent.get('longitude'))
                    # expiresAt is in milliseconds since the Unix epoch.
                    cardexpires = (idcontent.get('expiresAt'))
                    cardexpires = datetime.datetime.fromtimestamp(cardexpires / 1000)
                    carddevid = (idcontent.get('deviceId'))
                    data_list_cards.append(
                        (composetime, row[1], row[2], row[3], cardurl,
                         cardtitle, cardtext, cardurl2, cardlat, cardlong,
                         cardexpires, carddevid))
            else:
                # Fixed: append a single tuple (the original passed five
                # positional arguments to list.append -> TypeError).
                data_list_unparsed.append(
                    (composetime, row[1], row[2], row[3], plist))

        description = 'Microsoft Teams Call Logs'
        report = ArtifactHtmlReport('Microsoft Teams Call Logs')
        report.start_artifact_report(report_folder, 'Teams Call Logs',
                                     description)
        report.add_script()
        data_headers = ('Compose Timestamp', 'From', 'Display Name',
                        'Content', ' Call Start', 'Call Connect', 'Call End',
                        'Call Direction', 'Call Type', 'Call State',
                        'Call Originator', 'Call Target',
                        'Call Originator Name', 'Call Participant Name')
        report.write_artifact_data_table(data_headers, data_list_calls,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Call Logs'
        tsv(report_folder, data_headers, data_list_calls, tsvname)

        tlactivity = 'Microsoft Teams Call Logs'
        timeline(report_folder, tlactivity, data_list_calls, data_headers)

        description = 'Microsoft Teams Shared Locations'
        report = ArtifactHtmlReport('Microsoft Teams Shared Locations')
        report.start_artifact_report(report_folder, 'Teams Shared Locations',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'From', 'Display Name', 'Content',
                        'Card URL', 'Card Title', 'Card Text', 'Card URL2',
                        'Latitude', 'Longitude', 'Card Expires', 'Device ID')
        report.write_artifact_data_table(data_headers, data_list_cards,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Shared Locations'
        tsv(report_folder, data_headers, data_list_cards, tsvname)

        tlactivity = 'Microsoft Teams Shared Locations'
        timeline(report_folder, tlactivity, data_list_cards, data_headers)

        kmlactivity = 'Microsoft Teams Shared Locations'
        kmlgen(report_folder, kmlactivity, data_list_cards, data_headers)
    else:
        logfunc('No Microsoft Teams Call Logs & Cards data available')

    db.close()
def ReadPlist(path_or_file, deserialize=False):
    '''
    Safely open and read a plist.

    Accepts either a filesystem path or an already-open binary file object.
    When deserialize is True the plist is decoded as an NSKeyedArchiver
    archive via nd.deserialize_plist; otherwise plistlib/biplist is used,
    with a fallback that repairs whitespace-prefixed or BigSur hex-integer
    XML plists.

    Returns a tuple (True/False, plist/None, "error_message").

    Fix vs. original: the file handle is now closed on every exit path —
    previously the non-deserialize success path and the "Not a plist" /
    inner-exception paths leaked it.
    '''
    #log.debug("Trying to open plist file : " + path)
    error = ''
    path = ''
    plist = None
    f = None
    if isinstance(path_or_file, str):
        path = path_or_file
        try:
            f = open(path, 'rb')
        except OSError as ex:
            error = 'Could not open file, Error was : ' + str(ex)
    else:  # its a file
        f = path_or_file

    if f:
        if deserialize:
            try:
                plist = nd.deserialize_plist(f)
                f.close()
                return (True, plist, '')
            except (nd.DeserializeError, nd.biplist.NotBinaryPlistException,
                    nd.biplist.InvalidPlistException,
                    plistlib.InvalidFileException,
                    nd.ccl_bplist.BplistError, ValueError, TypeError,
                    OSError, OverflowError) as ex:
                error = 'Error deserializing plist: ' + path + \
                        " Error was : " + str(ex)
                f.close()
                return (False, plist, error)
        else:
            try:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(f)
                else:
                    plist = biplist.readPlist(f)
                f.close()  # fixed: was leaked on this success path
                return (True, plist, '')
            except (biplist.InvalidPlistException,
                    plistlib.InvalidFileException) as ex:
                try:
                    # Check for XML format
                    f.seek(0)
                    file_start_bytes = f.read(10)
                    if file_start_bytes.find(b'?xml') > 0:
                        # Perhaps this is manually edited or incorrectly
                        # formatted, leaving whitespace before the <?xml tag,
                        # or it's a BigSur (11.0) plist with hex integers.
                        f.seek(0)
                        data = f.read().decode('utf8', 'ignore')
                        f.close()
                        # Fix for BigSur plists with hex ints
                        data = CommonFunctions.replace_all_hex_int_with_int(data)
                        data = data.lstrip(" \r\n\t").encode(
                            'utf8', 'backslashreplace')
                        if sys.version_info >= (3, 9):
                            plist = plistlib.loads(data, fmt=plistlib.FMT_XML)
                        else:
                            plist = biplist.readPlistFromString(data)
                        return (True, plist, '')
                    else:
                        error = 'Not a plist! ' + path + " Error was : " + \
                                str(ex)
                except (biplist.InvalidPlistException, ValueError,
                        plistlib.InvalidFileException) as ex:
                    error = 'Could not read plist: ' + path + \
                            " Error was : " + str(ex)
                # fixed: close on the failure paths above (safe even if the
                # XML branch already closed it before its parse failed).
                if not f.closed:
                    f.close()
    return (False, None, error)