Esempio n. 1
0
def get_knowCinfocus(files_found, report_folder, seeker):
    """Report application in-focus sessions from knowledgeC.db.

    Parses the '/app/inFocus' stream of the ZOBJECT table. On iOS 12 and
    later the query additionally pulls the launch reason (from
    ZSTRUCTUREDMETADATA) and the record UUID, which do not exist on
    earlier versions.

    Parameters:
        files_found: iterable of paths; the first entry is the database.
        report_folder: destination folder for the HTML report.
        seeker: file seeker object (unused here, kept for plugin interface).
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    # iOS 12+ schema carries the launch-reason and UUID columns.
    has_extended_schema = version.parse(iOSversion) >= version.parse("12")

    cursor = db.cursor()
    if has_extended_schema:
        cursor.execute('''
        SELECT
            DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START",
            DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
            ZOBJECT.ZVALUESTRING AS "BUNDLE ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONMETADATAKEY__LAUNCHREASON AS "LAUNCH REASON",
            (ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
            (ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",
            CASE ZOBJECT.ZSTARTDAYOFWEEK
                WHEN "1" THEN "Sunday"
                WHEN "2" THEN "Monday"
                WHEN "3" THEN "Tuesday"
                WHEN "4" THEN "Wednesday"
                WHEN "5" THEN "Thursday"
                WHEN "6" THEN "Friday"
                WHEN "7" THEN "Saturday"
            END "DAY OF WEEK",
            ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
            DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
            ZOBJECT.ZUUID AS "UUID",
            ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
        FROM ZOBJECT
        LEFT JOIN ZSTRUCTUREDMETADATA
            ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK
        WHERE ZSTREAMNAME IS "/app/inFocus"
        ''')
    else:
        cursor.execute('''
        SELECT
            DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START",
            DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
            ZOBJECT.ZVALUESTRING AS "BUNDLE ID",
            (ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
            (ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",
            CASE ZOBJECT.ZSTARTDAYOFWEEK
                WHEN "1" THEN "Sunday"
                WHEN "2" THEN "Monday"
                WHEN "3" THEN "Tuesday"
                WHEN "4" THEN "Wednesday"
                WHEN "5" THEN "Thursday"
                WHEN "6" THEN "Friday"
                WHEN "7" THEN "Saturday"
            END "DAY OF WEEK",
            ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
            DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
            ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
        FROM ZOBJECT
        WHERE ZSTREAMNAME IS "/app/inFocus"
        ''')

    all_rows = cursor.fetchall()
    if all_rows:
        # Rows already come back in report column order.
        data_list = [tuple(row) for row in all_rows]
        if has_extended_schema:
            data_headers = ('Start', 'End', 'Bundle ID', 'Launch Reason',
                            'Usage in Seconds', 'Usage in Minutes',
                            'Day of Week', 'GMT Offset', 'Entry Creation',
                            'UUID', 'ZOBJECT Table ID')
        else:
            data_headers = ('Start', 'End', 'Bundle ID', 'Usage in Seconds',
                            'Usage in Minutes', 'Day of Week', 'GMT Offset',
                            'Entry Creation', 'ZOBJECT Table ID')

        report = ArtifactHtmlReport('KnowledgeC Application In Focus')
        report.start_artifact_report(report_folder, 'App In Focus')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        # Fixed typo in original output name ('Applicatoin').
        tsvname = 'KnowledgeC Application in Focus'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

    db.close()
    return
Esempio n. 2
0
def get_knowCmediaplaying(files_found, report_folder, seeker):
    """Report media 'now playing' events from knowledgeC.db (iOS 11+).

    Parses the '/media/nowPlaying' stream, joining ZSTRUCTUREDMETADATA
    for track metadata (album, artist, genre, title, duration).

    Parameters:
        files_found: iterable of paths; the first entry is the database.
        report_folder: destination folder for the HTML report.
        seeker: file seeker object (unused here, kept for plugin interface).
    """
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("11"):
        # Stream not present before iOS 11. (Fixed typo and missing space
        # in the original log message.)
        logfunc("Unsupported version for KnowledgeC Media Playing " + iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
    SELECT
        DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START",
        DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
        ZOBJECT.ZVALUESTRING AS "BUNDLE ID",
        ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__ALBUM AS "NOW PLAYING ALBUM",
        ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__ARTIST AS "NOW PLAYING ARTIST",
        ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__GENRE AS "NOW PLAYING GENRE",
        ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__TITLE AS "NOW PLAYING TITLE",
        ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__DURATION AS "NOW PLAYING DURATION",
        (ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
        (ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",
        CASE ZOBJECT.ZSTARTDAYOFWEEK
            WHEN "1" THEN "Sunday"
            WHEN "2" THEN "Monday"
            WHEN "3" THEN "Tuesday"
            WHEN "4" THEN "Wednesday"
            WHEN "5" THEN "Thursday"
            WHEN "6" THEN "Friday"
            WHEN "7" THEN "Saturday"
        END "DAY OF WEEK",
        ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
        DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
        ZOBJECT.ZUUID AS "UUID",
        ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
    FROM
        ZOBJECT
        LEFT JOIN
            ZSTRUCTUREDMETADATA
            ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK
        LEFT JOIN
            ZSOURCE
            ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK
    WHERE
        ZSTREAMNAME = "/media/nowPlaying"
    """)

    all_rows = cursor.fetchall()
    if all_rows:
        # Rows already come back in report column order.
        data_list = [tuple(row) for row in all_rows]

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Media Playing')
        report.start_artifact_report(report_folder, 'Media Playing',
                                     description)
        report.add_script()
        # Headers aligned with the SQL column aliases (the original had
        # 'Now Playing Artists' / 'Playing Genre' / 'Playing Title').
        data_headers = ('Start', 'End', 'Bundle ID', 'Now Playing Album',
                        'Now Playing Artist', 'Now Playing Genre',
                        'Now Playing Title', 'Now Playing Duration',
                        'Usage in Seconds', 'Usage in Minutes', 'Day of Week',
                        'GMT Offset', 'Entry Creation', 'UUID',
                        'ZOBJECT Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Media Playing'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Media Playing'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Esempio n. 3
0
def get_instagramThreads(files_found, report_folder, seeker):
    """Parse Instagram direct-message threads from the app database.

    First pass builds a user-pk -> full-name lookup from each thread's
    metadata plist; second pass extracts every message (text, VOIP call
    announcements, reactions, shared media) and writes reports, TSV and
    timeline output. Video calls get a separate report.

    Parameters:
        files_found: candidate paths; the first '.db' file is used.
        report_folder: destination folder for reports.
        seeker: file seeker object (unused here, kept for plugin interface).
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.db'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    metadata
    from threads
    ''')

    all_rows = cursor.fetchall()
    userdict = {}
    data_list = []
    video_calls = []

    for row in all_rows:
        plist = ''
        plist_file_object = io.BytesIO(row[0])
        if row[0].find(b'NSKeyedArchiver') == -1:
            if sys.version_info >= (3, 9):
                plist = plistlib.load(plist_file_object)
            else:
                plist = biplist.readPlist(plist_file_object)
        else:
            try:
                plist = nd.deserialize_plist(plist_file_object)
            except (nd.DeserializeError, nd.biplist.NotBinaryPlistException, nd.biplist.InvalidPlistException,
                    nd.plistlib.InvalidFileException, nd.ccl_bplist.BplistError, ValueError, TypeError, OSError, OverflowError) as ex:
                logfunc(f'Failed to read plist for {row[0]}, error was:' + str(ex))

        if not plist:
            # Parse failed; skip the row instead of crashing on ''[key].
            continue

        # Map user pk -> full name. (The original iterated the user list
        # once per user - accidental O(n^2) - with the outer variable unused.)
        for user in plist['NSArray<IGUser *>*users']:
            userdict[user['pk']] = user['fullName']

        inviter = plist['IGUser*inviter']
        userdict[inviter['pk']] = inviter['fullName']

    cursor.execute('''
    select
    messages.message_id,
    messages.thread_id,
    messages.archive,
    threads.metadata,
    threads.thread_messages_range,
    threads.visual_message_info
    from messages, threads
    where messages.thread_id = threads.thread_id
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)

    if usageentries > 0:
        for row in all_rows:
            plist = ''
            senderpk = ''
            serverTimestamp = ''
            message = ''
            videoChatTitle = ''
            videoChatCallID = ''
            dmreaction = ''
            reactionServerTimestamp = ''
            reactionUserID = ''
            sharedMediaID = ''
            sharedMediaURL = ''

            plist_file_object = io.BytesIO(row[2])
            if row[2].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError, nd.biplist.NotBinaryPlistException, nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException, nd.ccl_bplist.BplistError, ValueError, TypeError, OSError, OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[2]}, error was:' + str(ex))

            if not plist:
                # Parse failed; skip this message instead of crashing.
                continue

            # Message basics
            senderpk = plist['IGDirectPublishedMessageMetadata*metadata']['NSString*senderPk']
            serverTimestamp = plist['IGDirectPublishedMessageMetadata*metadata']['NSDate*serverTimestamp']
            message = plist['IGDirectPublishedMessageContent*content'].get('NSString*string')

            # VOIP calls
            if plist['IGDirectPublishedMessageContent*content'].get('IGDirectThreadActivityAnnouncement*threadActivity') is not None:
                videoChatTitle = plist['IGDirectPublishedMessageContent*content']['IGDirectThreadActivityAnnouncement*threadActivity'].get('NSString*voipTitle')
                videoChatCallID = plist['IGDirectPublishedMessageContent*content']['IGDirectThreadActivityAnnouncement*threadActivity'].get('NSString*videoCallId')

            # Reactions (only the first one is reported)
            reactions = plist['NSArray<IGDirectMessageReaction *>*reactions']
            if reactions:
                dmreaction = reactions[0].get('emojiUnicode')
                reactionServerTimestamp = reactions[0].get('serverTimestamp')
                reactionUserID = reactions[0].get('userId')

            # Shared media
            if plist['IGDirectPublishedMessageContent*content'].get('IGDirectPublishedMessageMedia*media'):
                try:
                    sharedMediaID = plist['IGDirectPublishedMessageContent*content']['IGDirectPublishedMessageMedia*media']['IGDirectPublishedMessagePermanentMedia*permanentMedia']['IGPhoto*photo']['kIGPhotoMediaID']
                except (KeyError, ValueError, TypeError, OSError, OverflowError) as ex:
                    logfunc('Had exception: ' + str(ex))
                    sharedMediaID = None

                try:
                    sharedMediaURL = plist['IGDirectPublishedMessageContent*content']['IGDirectPublishedMessageMedia*media']['IGDirectPublishedMessagePermanentMedia*permanentMedia']['IGPhoto*photo']['imageVersions'][0]['url']['NS.relative']
                except (KeyError, ValueError, TypeError, OSError, OverflowError) as ex:
                    logfunc('Had exception: ' + str(ex))
                    sharedMediaURL = None

            user = userdict.get(senderpk, '')

            data_list.append((serverTimestamp, senderpk, user, message, videoChatTitle, videoChatCallID, dmreaction, reactionServerTimestamp, reactionUserID, sharedMediaID, sharedMediaURL))
            if videoChatTitle:
                video_calls.append((serverTimestamp, senderpk, user, videoChatTitle, videoChatCallID))

        description = 'Instagram Threads'
        report = ArtifactHtmlReport('Instagram Threads')
        report.start_artifact_report(report_folder, 'Instagram Threads', description)
        report.add_script()
        data_headers = ('Timestamp', 'Sender ID', 'Username', 'Message', 'Video Chat Title', 'Video Chat ID', 'DM Reaction', 'DM Reaction Server Timestamp', 'Reaction User ID', 'Shared Media ID', 'Shared Media URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Instagram Threads'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Instagram Threads'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Instagram Threads data available')

    if len(video_calls) > 0:
        description = 'Instagram Threads Calls'
        report = ArtifactHtmlReport('Instagram Threads Calls')
        report.start_artifact_report(report_folder, 'Instagram Threads Calls', description)
        report.add_script()
        data_headersv = ('Timestamp', 'Sender ID', 'Username', 'Video Chat Title', 'Video Chat ID')
        report.write_artifact_data_table(data_headersv, video_calls, file_found)
        report.end_artifact_report()

        tsvname = 'Instagram Threads Calls'
        tsv(report_folder, data_headersv, video_calls, tsvname)

        tlactivity = 'Instagram Threads Calls'
        timeline(report_folder, tlactivity, video_calls, data_headersv)

    else:
        logfunc('No Instagram Threads Video Calls data available')

    db.close()
Esempio n. 4
0
def _bumble_read_plist(blob):
    # Deserialize a Bumble plist blob (NSKeyedArchiver or plain bplist);
    # returns the parsed plist, or None when deserialization fails.
    plist = None
    plist_file_object = io.BytesIO(blob)
    if blob.find(b'NSKeyedArchiver') == -1:
        if sys.version_info >= (3, 9):
            plist = plistlib.load(plist_file_object)
        else:
            plist = biplist.readPlist(plist_file_object)
    else:
        try:
            plist = nd.deserialize_plist(plist_file_object)
        except (nd.DeserializeError,
                nd.biplist.NotBinaryPlistException,
                nd.biplist.InvalidPlistException,
                nd.plistlib.InvalidFileException,
                nd.ccl_bplist.BplistError, ValueError, TypeError,
                OSError, OverflowError) as ex:
            logfunc(
                f'Failed to read plist for {blob}, error was:' +
                str(ex))
    return plist


def get_bumble(files_found, report_folder, seeker):
    """Parse Bumble chat messages and account details.

    Reads messages from Chat.sqlite (database2 joined with the
    isRead/isIncoming secondary index) and account info (user id/name,
    last location, app version) from yap-database.sqlite. Values are
    stored as serialized plists in the `data` column.

    Parameters:
        files_found: candidate paths; Chat.sqlite and yap-database.sqlite
            are selected by filename suffix.
        report_folder: destination folder for reports.
        seeker: file seeker object (unused here, kept for plugin interface).
    """
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('Chat.sqlite'):
            chat_db = file_found
        if file_found.endswith('yap-database.sqlite'):
            account_db = file_found

    db = open_sqlite_db_readonly(chat_db)
    cursor = db.cursor()
    cursor.execute('''
    select
    database2.data,
    case secondaryIndex_isReadIndex.isIncoming
        when 0 then 'Outgoing'
        when 1 then 'Incoming'
    end as "Direction",
    case secondaryIndex_isReadIndex.isRead
        when 0 then ''
        when 1 then 'Yes'
    end as "Message Read"
    from database2
    join secondaryIndex_isReadIndex on database2.rowid = secondaryIndex_isReadIndex.rowid
    ''')

    all_rows = cursor.fetchall()
    data_list = []

    if len(all_rows) > 0:
        for row in all_rows:
            if row[0] is None:
                # Original built io.BytesIO(None) before the None check,
                # which raises TypeError; skip empty blobs instead.
                continue

            plist = _bumble_read_plist(row[0])
            if not plist:
                # Parse failed (already logged); don't reuse a stale plist.
                continue

            if 'self.dateCreated' in plist:
                bumble_datecreated = datetime.datetime.utcfromtimestamp(
                    plist['self.dateCreated'])
                bumble_datemodified = datetime.datetime.utcfromtimestamp(
                    plist['self.dateModified'])
                bumble_sender = plist.get('self.fromPersonUid', '')
                bumble_receiver = plist.get('self.toPersonUid', '')
                bumble_message = plist.get('self.messageText', '')

                data_list.append(
                    (bumble_datecreated, bumble_datemodified, bumble_sender,
                     bumble_receiver, bumble_message, row[1], row[2]))

        description = 'Bumble - Messages'
        report = ArtifactHtmlReport('Bumble - Messages')
        report.start_artifact_report(report_folder, 'Bumble - Messages')
        report.add_script()
        data_headers = (
            'Created Timestamp', 'Modified Timestamp', 'Sender ID',
            'Receiver ID', 'Message', 'Message Direction', 'Message Read'
        )

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Bumble - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Bumble - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        # Fixed: original message was missing the leading 'No'.
        logfunc('No Bumble - Messages data available')

    # Close the chat DB before opening the account DB (original leaked it).
    db.close()

    db = open_sqlite_db_readonly(account_db)
    cursor = db.cursor()
    cursor.execute('''
    select
    data,
    key
    from database2
    where key = 'lastLocation' or key = 'appVersion' or key = 'userName' or key = 'userId'
    ''')

    all_rows1 = cursor.fetchall()
    data_list_account = []

    if len(all_rows1) > 0:
        for row in all_rows1:
            if row[0] is None:
                continue

            plist = _bumble_read_plist(row[0])
            if not plist:
                continue

            if row[1] == 'userId':
                bumble_userId = plist.get('root', '')
                data_list_account.append(('User ID', str(bumble_userId)))

            elif row[1] == 'userName':
                bumble_userName = plist.get('root', '')
                data_list_account.append(('User Name', str(bumble_userName)))

            elif row[1] == 'lastLocation':
                # Location timestamp is stored in Apple epoch (2001-01-01).
                bumble_timestamp = datetime.datetime.utcfromtimestamp(
                    int(plist['kCLLocationCodingKeyTimestamp']) + 978307200)
                bumble_lastlat = plist.get(
                    'kCLLocationCodingKeyRawCoordinateLatitude', '')
                bumble_lastlong = plist.get(
                    'kCLLocationCodingKeyRawCoordinateLongitude', '')

                data_list_account.append(('Timestamp', str(bumble_timestamp)))
                data_list_account.append(('Last Latitude', bumble_lastlat))
                data_list_account.append(('Last Longitude', bumble_lastlong))

            elif row[1] == 'appVersion':
                bumble_appVersion = plist.get('root', '')
                data_list_account.append(
                    ('App Version', str(bumble_appVersion)))

        description = 'Bumble - Account Details'
        report = ArtifactHtmlReport('Bumble - Account Details')
        report.start_artifact_report(report_folder, 'Bumble - Account Details')
        report.add_script()
        data_headers_account = (
            'Key', 'Values'
        )

        report.write_artifact_data_table(data_headers_account,
                                         data_list_account, file_found)
        report.end_artifact_report()

        tsvname = f'Bumble - Account Details'
        tsv(report_folder, data_headers_account, data_list_account, tsvname)

        tlactivity = f'Bumble - Account Details'
        timeline(report_folder, tlactivity, data_list_account,
                 data_headers_account)

    else:
        logfunc('No Bumble - Account Details data available')

    db.close()
Esempio n. 5
0
def get_Xender(files_found, report_folder, seeker, wrap_text):
    """Parse Xender file-transfer contacts and transfer history.

    Reads the profile table (known devices) and the new_history table
    (individual transfers) from the Xender '-db' database. Both queries
    are wrapped so missing tables (older app versions) degrade to
    'no data' rather than crashing.

    Parameters:
        files_found: candidate paths; the first '-db' file is used.
        report_folder: destination folder for reports.
        seeker: file seeker; its directory prefix is stripped for the TSV
            source path.
        wrap_text: unused, kept for plugin interface.
    """
    source_file = ''

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('-db'):
            source_file = file_found.replace(seeker.directory, '')
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    try:
        cursor.execute('''
        SELECT device_id, nick_name FROM profile WHERE connect_times = 0
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except sqlite3.Error:
        # Table/column absent in this schema version; treat as no data.
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Xender file transfer - contacts')
        report.start_artifact_report(report_folder,
                                     'Xender file transfer - contacts')
        report.add_script()
        data_headers = (
            'device_id', 'nick_name'
        )
        data_list = [(row[0], row[1]) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Xender file transfer - contacts'
        tsv(report_folder, data_headers, data_list, tsvname, source_file)

    else:
        logfunc('No Xender Contacts found')

    try:
        cursor.execute('''
        SELECT f_path, f_display_name, f_size_str, c_start_time/1000, c_direction, c_session_id, s_name, 
               s_device_id, r_name, r_device_id
          FROM new_history
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except sqlite3.Error:
        # Table absent in this schema version; treat as no data.
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Xender file transfer - Messages')
        report.start_artifact_report(report_folder,
                                     'Xender file transfer - Messages')
        report.add_script()
        data_headers = (
            'file_path', 'file_display_name', 'file_size', 'timestamp',
            'direction', 'to_id', 'from_id', 'session_id', 'sender_name',
            'sender_device_id', 'recipient_name', 'recipient_device_id'
        )
        data_list = []
        for row in all_rows:
            from_id = ''
            to_id = ''
            # c_direction == 1 means an outgoing transfer.
            if (row[4] == 1):
                direction = 'Outgoing'
                to_id = row[6]
            else:
                direction = 'Incoming'
                from_id = row[6]

            # c_start_time was stored in ms; query already divided by 1000.
            createtime = datetime.datetime.fromtimestamp(int(
                row[3])).strftime('%Y-%m-%d %H:%M:%S')

            data_list.append(
                (row[0], row[1], row[2], createtime, direction, to_id, from_id,
                 row[5], row[6], row[7], row[8], row[9]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Xender file transfer - Messages'
        tsv(report_folder, data_headers, data_list, tsvname, source_file)

        tlactivity = f'Xender file transfer - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Xender file transfer messages data available')

    db.close()
    return
Esempio n. 6
0
def get_imoHD_Chat(files_found, report_folder, seeker):
    """Parse IMO HD chat messages and contacts.

    Messages come from ZIMOCHATMSG (joined with ZIMOCONTACT for the
    sender); the ZIMDATA column holds a serialized plist with the item
    action and any attachment object ids. Matching attachment files found
    among files_found are copied into the report folder and embedded as
    thumbnails. A second query reports the contact list.

    Parameters:
        files_found: candidate paths; the first '.sqlite' file is the DB,
            remaining entries are searched for attachment files.
        report_folder: destination folder for reports.
        seeker: file seeker object (unused here, kept for plugin interface).
    """
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.sqlite'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    case ZIMOCHATMSG.ZTS
        when 0 then ''
        else datetime(ZTS/1000000000,'unixepoch')
    end  as "Timestamp",
    ZIMOCONTACT.ZDISPLAY as "Sender Display Name",
    ZIMOCHATMSG.ZALIAS as "Sender Alias",
    ZIMOCONTACT.ZDIGIT_PHONE,
    ZIMOCHATMSG.ZTEXT as "Message",
    case ZIMOCHATMSG.ZISSENT
        when 0 then 'Received'
        when 1 then 'Sent'
    end as "Message Status",
    ZIMOCHATMSG.ZIMDATA
    from ZIMOCHATMSG
    left join ZIMOCONTACT ON ZIMOCONTACT.ZBUID = ZIMOCHATMSG.ZA_UID
    ''')

    all_rows = cursor.fetchall()
    data_list = []

    if len(all_rows) > 0:
        for row in all_rows:

            plist = ''
            timestamp = row[0]
            senderName = row[1]
            senderAlias = row[2]
            senderPhone = row[3]
            message = row[4]
            messageStatus = row[5]
            itemAction = ''
            attachmentURL = ''
            thumb = ''

            if row[6] is not None:
                plist_file_object = io.BytesIO(row[6])
                if row[6].find(b'NSKeyedArchiver') == -1:
                    if sys.version_info >= (3, 9):
                        plist = plistlib.load(plist_file_object)
                    else:
                        plist = biplist.readPlist(plist_file_object)
                else:
                    try:
                        plist = nd.deserialize_plist(plist_file_object)
                    except (nd.DeserializeError,
                            nd.biplist.NotBinaryPlistException,
                            nd.biplist.InvalidPlistException,
                            nd.plistlib.InvalidFileException,
                            nd.ccl_bplist.BplistError, ValueError, TypeError,
                            OSError, OverflowError) as ex:
                        # Fixed: original logged row[0] (the timestamp)
                        # instead of the blob being parsed (row[6]).
                        logfunc(
                            f'Failed to read plist for {row[6]}, error was:' +
                            str(ex))

                # Only inspect the plist when deserialization succeeded;
                # original crashed on ''['type'] after a failed parse.
                if plist:
                    itemAction = plist['type']

                    # Check for attachments
                    if plist.get('objects') is not None:
                        attachmentName = plist['objects'][0]['object_id']
                        attachmentURL = "https://cdn.imoim.us/s/object/" + attachmentName + "/"

                        for match in files_found:
                            if attachmentName in match:
                                shutil.copy2(match, report_folder)
                                data_file_name = os.path.basename(match)
                                thumb = f'<img src="{report_folder}/{data_file_name}"></img>'

            data_list.append(
                (timestamp, senderName, senderAlias, senderPhone, message,
                 messageStatus, itemAction, attachmentURL, thumb))

        description = 'IMO HD Chat - Messages'
        report = ArtifactHtmlReport('IMO HD Chat - Messages')
        report.start_artifact_report(report_folder, 'IMO HD Chat - Messages')
        report.add_script()
        data_headers = (
            'Timestamp', 'Sender Name', 'Sender Alias', 'Sender Phone',
            'Message', 'Message Status', 'Item Action', 'Attachment URL',
            'Attachment'
        )

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Attachment'])
        report.end_artifact_report()

        tsvname = f'IMO HD Chat - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'IMO HD Chat - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        # Fixed: original message was missing the leading 'No'.
        logfunc('No IMO HD Chat - Messages data available')

    cursor.execute('''
    select
    ZPH_NAME,
    ZALIAS,
    ZPHONE,
    "https://cdn.imoim.us/s/object/" || ZICON_ID || "/" as "Profile Pic",
    ZBUID
    from ZIMOCONTACT
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        description = 'IMO HD Chat - Contacts'
        report = ArtifactHtmlReport('IMO HD Chat - Contacts')
        report.start_artifact_report(report_folder, 'IMO HD Chat - Contacts')
        report.add_script()
        data_headers = (
            'Contact Name', 'Contact Alias', 'Contact Phone',
            'Profile Pic URL', 'User ID'
        )
        data_list = [tuple(row) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'IMO HD Chat - Contacts'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'IMO HD Chat - Contacts'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        # Fixed: original message was missing the leading 'No'.
        logfunc('No IMO HD Chat - Contacts data available')

    db.close()
Esempio n. 7
0
def get_locationDappharvest(files_found, report_folder, seeker):
    """Report harvested per-app location fixes from locationd (pre-iOS 11).

    Queries the APPHARVEST table and writes an HTML report, TSV export
    and timeline entries. Bails out on iOS 11+ where the table is gone.
    """
    file_found = str(files_found[0])

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        # Artifact only exists on iOS versions before 11.
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
        return ()

    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	BUNDLEID AS "BUNDLE ID",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	ALTITUDE AS "ALTITUDE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	STATE AS "STATE",
	AGE AS "AGE",
	ROUTINEMODE AS "ROUTINE MODE",
	LOCATIONOFINTERESTTYPE AS "LOCATION OF INTEREST TYPE",
	HEX(SIG) AS "SIG (HEX)",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE"
	FROM APPHARVEST
	""")

    rows = cursor.fetchall()
    if not rows:
        logfunc('No data available for LocationD App Harvest')
        db.close()
        return

    # Each row already matches the report column order.
    records = [tuple(entry) for entry in rows]

    report = ArtifactHtmlReport('LocationD App Harvest')
    report.start_artifact_report(report_folder, 'App Harvest', '')
    report.add_script()
    headers = ('Timestamp', 'Bundle ID', 'Coordinates', 'Altitude',
               'Horizontal Accuracy', 'Vertical Accuracy', 'State',
               'Age', 'Routine Mode', 'Location of Interest Type',
               'Sig (HEX)', 'Latitude', 'Longitude', 'Speed',
               'Course', 'Confidence')
    report.write_artifact_data_table(headers, records, file_found)
    report.end_artifact_report()

    export_name = 'LocationD Cell App Harvest'
    tsv(report_folder, headers, records, export_name)
    timeline(report_folder, export_name, records, headers)

    db.close()
    return
Esempio n. 8
0
def process_recentactivity(folder, uid, report_folder):
    """Parse Android recent_tasks XML files for one user (uid), stage the
    extracted attributes in a scratch SQLite database inside report_folder,
    and emit an HTML report with the associated snapshot / recent images.

    Parameters:
        folder: directory containing 'recent_tasks', 'snapshots' and
            'recent_images' subdirectories pulled from the device.
        uid: user identifier; used to name the scratch database and report.
        report_folder: output directory; matched images are copied here so
            the HTML report can reference them with relative links.

    Fixes over the original: each XML file is parsed once instead of twice,
    a single database connection is reused for both insert and report
    phases (the original opened a second connection to the same file), and
    the connection is closed on exit.
    """
    slash = '\\' if is_platform_windows() else '/'

    db = sqlite3.connect(
        os.path.join(report_folder, 'RecentAct_{}.db'.format(uid)))
    cursor = db.cursor()
    # Scratch table: one row per <task> child element found in the XML.
    cursor.execute('''
    CREATE TABLE 
    recent(task_id TEXT, effective_uid TEXT, affinity TEXT, real_activity TEXT, first_active_time TEXT, last_active_time TEXT,
    last_time_moved TEXT, calling_package TEXT, user_id TEXT, action TEXT, component TEXT, snap TEXT,recimg TXT, fullat1 TEXT, fullat2 TEXT)
    ''')
    db.commit()

    # Name of the report folder itself, used to build relative <img> links.
    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)

    for filename in glob.iglob(os.path.join(folder, 'recent_tasks', '**'),
                               recursive=True):
        if not os.path.isfile(filename):  # filter dirs
            continue

        # Parse once; skip anything that is not well-formed XML.
        try:
            tree = ET.parse(filename)
        except ET.ParseError:
            logfunc('Parse error - Non XML file? at: ' + filename)
            continue

        root = tree.getroot()

        # Root-level attributes are the same for every child row, so
        # extract them once per file instead of once per child.
        fullat1 = json.dumps(root.attrib)
        task_id = root.attrib.get('task_id')
        effective_uid = root.attrib.get('effective_uid')
        affinity = root.attrib.get('affinity')
        real_activity = root.attrib.get('real_activity')
        first_active_time = root.attrib.get('first_active_time')
        last_active_time = root.attrib.get('last_active_time')
        last_time_moved = root.attrib.get('last_time_moved')
        calling_package = root.attrib.get('calling_package')
        user_id = root.attrib.get('user_id')
        icon_image_path = root.attrib.get('task_description_icon_filename')

        for child in root:
            # Child attributes serialized whole, plus the two we report.
            fullat2 = json.dumps(child.attrib)
            action = child.attrib.get('action')
            component = child.attrib.get('component')

            # Snapshot image is named after the task id.
            snapshot = task_id + '.jpg'
            check1 = os.path.join(folder, 'snapshots', snapshot)
            if os.path.isfile(check1):
                # Copy snapshot next to the report so it can be linked.
                shutil.copy2(check1, report_folder)
                snap = snapshot
            else:
                snap = 'NO IMAGE'

            # Recent_images section.
            if icon_image_path is not None:
                recent_image = os.path.basename(icon_image_path)
                check2 = os.path.join(folder, 'recent_images', recent_image)
                if os.path.isfile(check2):
                    shutil.copy2(check2, report_folder)
                    recimg = recent_image
                else:
                    recimg = 'NO IMAGE'
            else:
                # Icon not referenced in the XML - look for any file type
                # under recent_images/<task_id>/ and take the first match.
                check3 = glob.glob(
                    os.path.join(folder, 'recent_images', task_id, '*.*'))
                if check3 and os.path.isfile(check3[0]):
                    shutil.copy2(check3[0], report_folder)
                    recimg = os.path.basename(check3[0])
                else:
                    recimg = 'NO IMAGE'

            datainsert = (
                task_id,
                effective_uid,
                affinity,
                real_activity,
                first_active_time,
                last_active_time,
                last_time_moved,
                calling_package,
                user_id,
                action,
                component,
                snap,
                recimg,
                fullat1,
                fullat2,
            )
            cursor.execute(
                'INSERT INTO recent (task_id, effective_uid, affinity, real_activity, first_active_time, last_active_time, last_time_moved, calling_package, user_id, action, component, snap, recimg, fullat1, fullat2)  VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
                datainsert)
            db.commit()

    report = ArtifactHtmlReport('Recent Tasks, Snapshots & Images')
    location = os.path.join(folder, 'recent_tasks')
    report.start_artifact_report(report_folder, f'Recent Activity_{uid}',
                                 f'Artifacts located at {location}')
    report.add_script()
    data_headers = ('Key', 'Value')
    image_data_headers = ('Snapshot_Image', 'Recent_Image')

    # Query to create the report; the connection opened above is reused.
    cursor.execute('''
    SELECT 
        task_id as Task_ID, 
        effective_uid as Effective_UID, 
        affinity as Affinity, 
        real_activity as Real_Activity, 
        datetime(first_active_time/1000, 'UNIXEPOCH') as First_Active_Time, 
        datetime(last_active_time/1000, 'UNIXEPOCH') as Last_Active_Time,
        datetime(last_time_moved/1000, 'UNIXEPOCH') as Last_Time_Moved,
        calling_package as Calling_Package, 
        user_id as User_ID, 
        action as Action, 
        component as Component, 
        snap as Snapshot_Image, 
        recimg as Recent_Image
    FROM recent
    ''')
    all_rows = cursor.fetchall()
    colnames = cursor.description

    # Shared <img> markup; {0} is the image file name, {1} the folder name.
    img_template = ('<a href="{1}/{0}"><img src="{1}/{0}" '
                    'class="img-fluid z-depth-2 zoom" '
                    'style="max-height: 400px" title="{0}"></a>')

    for row in all_rows:
        # Affinity (row[2]) doubles as the application label in the header.
        row2 = '' if row[2] is None else row[2]
        report.write_minor_header(f'Application: {row2}')

        # Key/value table: one line per non-NULL column (first 13 columns).
        data_list = []
        for x in range(0, 13):
            if row[x] is not None:
                data_list.append((colnames[x][0], str(row[x])))

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         folder,
                                         table_id='',
                                         write_total=False,
                                         write_location=False,
                                         cols_repeated_at_bottom=False)

        # Image table: snapshot (row[11]) and recent image (row[12]).
        image_data_row = []
        image_data_list = [image_data_row]

        if row[11] == 'NO IMAGE':
            image_data_row.append('No Image')
        else:
            image_data_row.append(
                img_template.format(str(row[11]), folder_name))
        if row[12] == 'NO IMAGE':
            image_data_row.append('No Image')
        else:
            image_data_row.append(
                img_template.format(str(row[12]), folder_name))
        report.write_artifact_data_table(image_data_headers,
                                         image_data_list,
                                         folder,
                                         table_id='',
                                         table_style="width: auto",
                                         write_total=False,
                                         write_location=False,
                                         html_escape=False,
                                         cols_repeated_at_bottom=False)
        report.write_raw_html('<br />')

    report.end_artifact_report()
    db.close()
Esempio n. 9
0
def get_knowClocked(files_found, report_folder, seeker):
    """Report device lock/unlock intervals from a KnowledgeC database.

    Reads the '/device/isLocked' stream from ZOBJECT and writes an HTML
    report, a TSV file and a timeline entry for each interval.

    Bug fixed: the original query selected the START and END timestamps
    twice, so the columns reported as "Entry Creation" and "ZOBJECT Table
    ID" (row[6]/row[7]) actually contained the duplicated start/end values.
    The duplicate columns are removed so row indices line up with the
    headers, and the database connection is now closed on exit.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    # Timestamps are Apple epoch (2001-01-01), hence the +978307200 shift.
    cursor.execute("""
	SELECT
	DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') as "START", 
	DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') as "END",
	CASE ZOBJECT.ZVALUEINTEGER
	WHEN '0' THEN 'UNLOCKED' 
	WHEN '1' THEN 'LOCKED' 
	END "IS LOCKED",
	(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",  
	CASE ZOBJECT.ZSTARTDAYOFWEEK 
	WHEN "1" THEN "Sunday"
	WHEN "2" THEN "Monday"
	WHEN "3" THEN "Tuesday"
	WHEN "4" THEN "Wednesday"
	WHEN "5" THEN "Thursday"
	WHEN "6" THEN "Friday"
	WHEN "7" THEN "Saturday"
	END "DAY OF WEEK",
	ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
	DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') as "ENTRY CREATION", 
	ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
	FROM
	ZOBJECT 
	LEFT JOIN
	ZSTRUCTUREDMETADATA 
	ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
	LEFT JOIN
	ZSOURCE 
	ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
	WHERE
	ZSTREAMNAME LIKE "/device/isLocked"
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            # The query now returns exactly the eight columns reported below.
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Device Locked')
        report.start_artifact_report(report_folder, 'Device Locked',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Is Locked?', 'Usage in Seconds',
                        'Day of the Week', 'GMT Offset', 'Entry Creation',
                        'ZOBJECT Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Device Locked'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Device Locked'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for KnowledgeC Device Locked')

    db.close()
def get_chromeDownloads(files_found, report_folder, seeker):
    """Extract the Chrome 'downloads' table and write it to an HTML report.

    Chrome stores timestamps as microseconds since 1601-01-01 (WebKit
    epoch); the query converts them to readable UTC strings, leaving the
    literal "0" in place for unset timestamps.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    tab_url,
    CASE
        start_time  
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(start_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "Start Time", 
    CASE
        end_time 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(end_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "End Time", 
    CASE
        last_access_time 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(last_access_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "Last Access Time", 
    target_path,
    state,
    opened,
    received_bytes,
    total_bytes
    FROM
    downloads
    ''')

    rows = cursor.fetchall()
    if not rows:
        logfunc('No Chrome download data available')
    else:
        report = ArtifactHtmlReport('Chrome Downloads')
        report.start_artifact_report(report_folder, 'Downloads')
        report.add_script()
        data_headers = ('URL','Start Time','End Time','Last Access Time','Target Path','State','Opened?','Received Bytes','Total Bytes' )
        # One 9-tuple per download row, in query order.
        data_list = [tuple(entry[:9]) for entry in rows]
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

    db.close()
    return
Esempio n. 11
0
def get_permissions(files_found, report_folder, seeker, wrap_text):
    """Parse Android permission registry XML files and report permission
    trees, permission definitions, and per-package granted permissions.

    Handles both plain XML and Android Binary XML (detected with checkabx
    and read with abxread).

    Bug fixed: the original assigned a placeholder string to ``user`` and
    then compared it against 'mirror', so files under a 'mirror' path
    component were never actually skipped. The path-component check is now
    done directly.
    """
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)

        data_list_permission_trees = []
        data_list_permissions = []
        data_list_packages_su = []

        # Skip duplicated files that live under a 'mirror' directory.
        if 'mirror' in file_found.split(slash):
            continue

        try:
            if checkabx(file_found):
                # Android Binary XML needs the dedicated ABX reader.
                multi_root = False
                tree = abxread(file_found, multi_root)
            else:
                tree = ET.parse(file_found)
        except ET.ParseError:
            logfunc('Parse error - Non XML file.')
            continue

        root = tree.getroot()

        for elem in root:
            if elem.tag == 'permission-trees':
                for subelem in elem:
                    data_list_permission_trees.append(
                        (subelem.attrib.get('name', ''),
                         subelem.attrib.get('package', '')))
            elif elem.tag == 'permissions':
                for subelem in elem:
                    data_list_permissions.append(
                        (subelem.attrib.get('name', ''),
                         subelem.attrib.get('package', ''),
                         subelem.attrib.get('protection', '')))
            else:
                # Package / shared-user entries: individual granted
                # permissions are nested under a <perms> child element.
                for subelem in elem:
                    if subelem.tag == 'perms':
                        for sub_subelem in subelem:
                            data_list_packages_su.append(
                                (elem.tag, elem.attrib.get('name', ''),
                                 sub_subelem.attrib.get('name', ''),
                                 sub_subelem.attrib.get('granted', '')))

        if len(data_list_permission_trees) > 0:
            report = ArtifactHtmlReport('Permission Trees')
            report.start_artifact_report(report_folder, 'Permission Trees')
            report.add_script()
            data_headers = ('Name', 'Package')
            report.write_artifact_data_table(
                data_headers, data_list_permission_trees, file_found)
            report.end_artifact_report()

            tsvname = 'Permission Trees'
            tsv(report_folder, data_headers,
                data_list_permission_trees, tsvname)

        if len(data_list_permissions) > 0:
            report = ArtifactHtmlReport('Permissions')
            report.start_artifact_report(report_folder, 'Permissions')
            report.add_script()
            data_headers = ('Name', 'Package', 'Protection')
            report.write_artifact_data_table(data_headers,
                                             data_list_permissions,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Permissions'
            tsv(report_folder, data_headers, data_list_permissions,
                tsvname)

        if len(data_list_packages_su) > 0:
            report = ArtifactHtmlReport('Package and Shared User')
            report.start_artifact_report(report_folder,
                                         'Package and Shared User')
            report.add_script()
            data_headers = ('Type', 'Package', 'Permission',
                            'Granted?')
            report.write_artifact_data_table(data_headers,
                                             data_list_packages_su,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Permissions - Packages and Shared User'
            tsv(report_folder, data_headers, data_list_packages_su,
                tsvname)
Esempio n. 12
0
def get_ControlCenter(files_found, report_folder, seeker):
    """Report iOS Control Center module configuration from its plist(s).

    Collects three lists from each plist: disabled modules, active
    modules, and user-toggled ("userenabled") modules, each with their
    1-based position in the list.

    Fixes over the original: the position counter is now reset for every
    list in every file (previously the count from one file's last section
    carried into the next file's first section), and the prepared
    description text is actually passed to start_artifact_report (it was
    assigned but unused).
    """

    def _numbered(modules):
        # [(1, bundle), (2, bundle), ...] — positions restart per list.
        return [(pos, module) for pos, module in enumerate(modules, start=1)]

    def _report_section(title, description, data_list):
        # Write one report section + TSV, or log when there is no data.
        if data_list:
            report = ArtifactHtmlReport(title)
            report.start_artifact_report(report_folder, title, description)
            report.add_script()
            data_headers = ['Position', 'App Bundle']
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsv(report_folder, data_headers, data_list, title)
        else:
            logfunc(f'No {title} data available')

    data_list_disabled = []
    data_list_modules = []
    data_list_userenabled = []

    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, 'rb') as f:
            pl = plistlib.load(f)

        # Missing keys and empty lists both contribute nothing.
        data_list_disabled.extend(
            _numbered(pl.get('disabled-module-identifiers', [])))
        data_list_modules.extend(
            _numbered(pl.get('module-identifiers', [])))
        data_list_userenabled.extend(
            _numbered(pl.get('userenabled-fixed-module-identifiers', [])))

    _report_section('Control Center - Disabled Controls',
                    'Controls disabled in the Control Center',
                    data_list_disabled)
    _report_section('Control Center - Active Controls',
                    'Controls that are active and added to Control Center',
                    data_list_modules)
    _report_section('Control Center - User Toggled Controls',
                    'Controls that have been added by the user via a hard toggle to the Control Center',
                    data_list_userenabled)
Esempio n. 13
0
def get_appleWifiPlist(files_found, report_folder, seeker):
    """Parse Apple WiFi plist files and report known networks plus networks
    scanned with a private (randomized) MAC address.

    Each file in files_found is loaded with plistlib. Three things are
    extracted when present:
      - 'KeepWiFiPoweredAirplaneMode' -> logged via logdevinfo;
      - 'List of known networks' -> one row per network in known_data_list;
      - 'List of scanned networks with private mac' -> one row per network
        in scanned_data_list.
    Non-empty lists are written as an HTML report, a TSV file and a
    timeline entry. Missing plist keys become empty strings in the rows.
    """
    known_data_list = []
    scanned_data_list = []
    # Source files that contributed rows, joined into the report location.
    known_files = []
    scanned_files = []
    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, 'rb') as f:
            deserialized = plistlib.load(f)
            if 'KeepWiFiPoweredAirplaneMode' in deserialized:
                val = (deserialized['KeepWiFiPoweredAirplaneMode'])
                logdevinfo(f"Keep Wifi Powered Airplane Mode: {val}")

            if 'List of known networks' in deserialized:
                known_files.append(file_found)
                for known_network in deserialized['List of known networks']:
                    # Defaults: any field missing from the plist stays ''.
                    ssid = ''
                    bssid = ''
                    last_updated = ''
                    last_auto_joined = ''
                    wnpmd = ''
                    net_usage = ''
                    country_code = ''
                    device_name = ''
                    manufacturer = ''
                    serial_number = ''
                    model_name = ''
                    enabled = ''
                    last_joined = ''

                    if 'SSID_STR' in known_network:
                        ssid = str(known_network['SSID_STR'])

                    if 'BSSID' in known_network:
                        bssid = str(known_network['BSSID'])

                    if 'networkUsage' in known_network:
                        net_usage = str(known_network['networkUsage'])

                    # Country code is nested inside the 802.11d info element.
                    if '80211D_IE' in known_network:
                        if 'IE_KEY_80211D_COUNTRY_CODE' in known_network[
                                '80211D_IE']:
                            country_code = str(known_network['80211D_IE']
                                               ['IE_KEY_80211D_COUNTRY_CODE'])

                    # Dates are kept exactly as stored in the plist.
                    if 'lastUpdated' in known_network:
                        last_updated = str(known_network['lastUpdated'])

                    if 'lastAutoJoined' in known_network:
                        last_auto_joined = str(known_network['lastAutoJoined'])

                    if 'lastJoined' in known_network:
                        last_joined = str(known_network['lastJoined'])

                    if 'WiFiNetworkPasswordModificationDate' in known_network:
                        wnpmd = str(known_network[
                            'WiFiNetworkPasswordModificationDate'])

                    if 'enabled' in known_network:
                        enabled = str(known_network['enabled'])

                    # Access-point details from the WPS probe response element.
                    if 'WPS_PROB_RESP_IE' in known_network:

                        if 'IE_KEY_WPS_DEV_NAME' in known_network[
                                'WPS_PROB_RESP_IE']:
                            device_name = known_network['WPS_PROB_RESP_IE'][
                                'IE_KEY_WPS_DEV_NAME']
                        if 'IE_KEY_WPS_MANUFACTURER' in known_network[
                                'WPS_PROB_RESP_IE']:
                            manufacturer = known_network['WPS_PROB_RESP_IE'][
                                'IE_KEY_WPS_MANUFACTURER']
                        if 'IE_KEY_WPS_SERIAL_NUM' in known_network[
                                'WPS_PROB_RESP_IE']:
                            serial_number = known_network['WPS_PROB_RESP_IE'][
                                'IE_KEY_WPS_SERIAL_NUM']
                        if 'IE_KEY_WPS_MODEL_NAME' in known_network[
                                'WPS_PROB_RESP_IE']:
                            model_name = known_network['WPS_PROB_RESP_IE'][
                                'IE_KEY_WPS_MODEL_NAME']

                    # Row order must match the 'WiFi Known Networks' headers.
                    known_data_list.append([
                        ssid, bssid, net_usage, country_code, device_name,
                        manufacturer, serial_number, model_name, last_joined,
                        last_auto_joined, last_updated, enabled, wnpmd,
                        file_found
                    ])

            if 'List of scanned networks with private mac' in deserialized:
                scanned_files.append(file_found)
                for scanned_network in deserialized[
                        'List of scanned networks with private mac']:
                    # Defaults: any field missing from the plist stays ''.
                    ssid = ''
                    bssid = ''
                    last_updated = ''
                    last_joined = ''
                    private_mac_in_use = ''
                    private_mac_value = ''
                    private_mac_valid = ''
                    added_at = ''
                    in_known_networks = ''

                    if 'SSID_STR' in scanned_network:
                        ssid = str(scanned_network['SSID_STR'])

                    if 'BSSID' in scanned_network:
                        bssid = str(scanned_network['BSSID'])

                    if 'lastUpdated' in scanned_network:
                        last_updated = str(scanned_network['lastUpdated'])

                    if 'lastJoined' in scanned_network:
                        last_joined = str(scanned_network['lastJoined'])

                    if 'addedAt' in scanned_network:
                        added_at = str(scanned_network['addedAt'])

                    if 'PresentInKnownNetworks' in scanned_network:
                        in_known_networks = str(
                            scanned_network['PresentInKnownNetworks'])

                    # Private MAC fields are stored as bytes; the module's
                    # _bytes_to_mac_address helper renders them (presumably
                    # as colon-separated hex — defined elsewhere in file).
                    if 'PRIVATE_MAC_ADDRESS' in scanned_network:
                        if 'PRIVATE_MAC_ADDRESS_IN_USE' in scanned_network[
                                'PRIVATE_MAC_ADDRESS']:
                            private_mac_in_use = str(
                                _bytes_to_mac_address(
                                    scanned_network['PRIVATE_MAC_ADDRESS']
                                    ['PRIVATE_MAC_ADDRESS_IN_USE']))
                        if 'PRIVATE_MAC_ADDRESS_VALUE' in scanned_network[
                                'PRIVATE_MAC_ADDRESS']:
                            private_mac_value = str(
                                _bytes_to_mac_address(
                                    scanned_network['PRIVATE_MAC_ADDRESS']
                                    ['PRIVATE_MAC_ADDRESS_VALUE']))
                        if 'PRIVATE_MAC_ADDRESS_VALID' in scanned_network[
                                'PRIVATE_MAC_ADDRESS']:
                            private_mac_valid = str(
                                scanned_network['PRIVATE_MAC_ADDRESS']
                                ['PRIVATE_MAC_ADDRESS_VALID'])

                    # Row order must match the scanned-networks headers.
                    scanned_data_list.append([
                        ssid, bssid, added_at, last_joined, last_updated,
                        private_mac_in_use, private_mac_value,
                        private_mac_valid, in_known_networks, file_found
                    ])

    if len(known_data_list) > 0:
        description = 'WiFi known networks data. Dates are taken straight from the source plist.'
        report = ArtifactHtmlReport('Locations')
        report.start_artifact_report(report_folder, 'WiFi Known Networks',
                                     description)
        report.add_script()
        data_headers = [
            'SSID', 'BSSID', 'Network Usage', 'Country Code', 'Device Name',
            'Manufacturer', 'Serial Number', 'Model Name', 'Last Joined',
            'Last Auto Joined', 'Last Updated', 'Enabled',
            'WiFi Network Password Modification Date', 'File'
        ]
        report.write_artifact_data_table(data_headers, known_data_list,
                                         ', '.join(known_files))
        report.end_artifact_report()

        tsvname = 'WiFi Known Networks'
        tsv(report_folder, data_headers, known_data_list, tsvname)

        tlactivity = 'WiFi Known Networks'
        timeline(report_folder, tlactivity, known_data_list, data_headers)

    if len(scanned_data_list) > 0:
        description = 'WiFi networks scanned while using fake ("private") MAC address. Dates are taken straight from the source plist.'
        report = ArtifactHtmlReport('Locations')
        report.start_artifact_report(report_folder,
                                     'WiFi Networks Scanned (private)',
                                     description)
        report.add_script()
        data_headers = [
            'SSID', 'BSSID', 'Added At', 'Last Joined', 'Last Updated',
            'MAC Used For Network', 'Private MAC Computed For Network',
            'MAC Valid', 'In Known Networks', 'File'
        ]
        report.write_artifact_data_table(data_headers, scanned_data_list,
                                         ', '.join(scanned_files))
        report.end_artifact_report()

        tsvname = 'WiFi Networks Scanned (private)'
        tsv(report_folder, data_headers, scanned_data_list, tsvname)

        tlactivity = 'WiFi Networks Scanned (private)'
        timeline(report_folder, tlactivity, scanned_data_list, data_headers)
Esempio n. 14
0
def resolve_usage_time(base_ms, raw):
    """Resolve a usagestats timestamp attribute to absolute epoch-ms.

    Values in the XML are either offsets (positive) from the file-name
    epoch-ms base, or already-absolute timestamps stored negated.
    """
    value = int(raw)
    return abs(value) if value < 0 else base_ms + value


def process_usagestats(folder, uid, report_folder):
    """Parse Android usagestats XML/protobuf files for one user and report them.

    Walks *folder* recursively, flattens every recognized entry into a scratch
    sqlite database inside *report_folder* (usagestats_<uid>.db), then writes
    an HTML report and a TSV export. Progress and errors go through logfunc().
    """
    # Scratch database used to normalize XML/protobuf records before querying.
    db = sqlite3.connect(os.path.join(report_folder, 'usagestats_{}.db'.format(uid)))
    cursor = db.cursor()

    cursor.execute('''
        CREATE TABLE data(usage_type TEXT, lastime INTEGER, timeactive INTEGER,
                          last_time_service_used INTEGER, last_time_visible INTEGER, total_time_visible INTEGER,
                          app_launch_count INTEGER,
                          package TEXT, types TEXT, classs TEXT,
                          source TEXT, fullatt TEXT)
    ''')
    db.commit()

    # Single parameterized INSERT shared by every branch below.
    insert_sql = ('INSERT INTO data (usage_type, lastime, timeactive, last_time_service_used, '
                  'last_time_visible, total_time_visible, app_launch_count, package, types, '
                  'classs, source, fullatt)  VALUES(?,?,?,?,?,?,?,?,?,?,?,?)')

    for filename in glob.iglob(os.path.join(folder, '**'), recursive=True):
        if not os.path.isfile(filename):  # filter dirs
            continue
        file_name = os.path.basename(filename)
        if file_name == 'version':
            continue

        # The period bucket comes from the directory path the file lives in.
        # BUGFIX: 'sourced' was previously left unbound when no bucket matched.
        sourced = 'unknown'
        for bucket in ('daily', 'weekly', 'monthly', 'yearly'):
            if bucket in filename:
                sourced = bucket
                break

        # The file name itself is the epoch-ms base for relative timestamps.
        # BUGFIX: skip the file entirely on a bad name; previously processing
        # could continue with an unbound or stale file_name_int.
        try:
            file_name_int = int(file_name)
        except ValueError:
            logfunc('Invalid filename: ')
            logfunc(filename)
            logfunc('')
            continue

        # BUGFIX: reset per file so a stale parse from an earlier file is
        # never fed to AddEntriesToDb when this file fails both parsers.
        stats = None
        try:
            tree = ET.parse(filename)
        except ET.ParseError:
            # Perhaps an Android Q protobuf file
            try:
                stats = ReadUsageStatsPbFile(filename)
            except Exception:
                logfunc('Parse error - Non XML and Non Protobuf file? at: ')
                logfunc(filename)
                logfunc('')
                continue
            if stats:
                AddEntriesToDb(sourced, file_name_int, stats, db)
            continue

        root = tree.getroot()
        print('Processing: ' + filename)
        print('')
        for elem in root:
            usagetype = elem.tag
            if usagetype == 'packages':
                for subelem in elem:
                    fullatti_str = json.dumps(subelem.attrib)
                    finalt = resolve_usage_time(file_name_int, subelem.attrib['lastTimeActive'])
                    pkg = subelem.attrib['package']
                    tac = subelem.attrib['timeActive']
                    alc = subelem.attrib.get('appLaunchCount', '')
                    cursor.execute(insert_sql,
                                   (usagetype, finalt, tac, '', '', '', alc, pkg, '', '', sourced, fullatti_str))
                    db.commit()

            elif usagetype == 'configurations':
                for subelem in elem:
                    fullatti_str = json.dumps(subelem.attrib)
                    finalt = resolve_usage_time(file_name_int, subelem.attrib['lastTimeActive'])
                    tac = subelem.attrib['timeActive']
                    cursor.execute(insert_sql,
                                   (usagetype, finalt, tac, '', '', '', '', '', '', '', sourced, fullatti_str))
                    db.commit()

            elif usagetype == 'event-log':
                for subelem in elem:
                    fullatti_str = json.dumps(subelem.attrib)
                    finalt = resolve_usage_time(file_name_int, subelem.attrib['time'])
                    pkg = subelem.attrib['package']
                    tipes = subelem.attrib['type']
                    # 'class' is optional on event entries.
                    classy = subelem.attrib.get('class', '')
                    # BUGFIX: the original executed the INSERT *before* building
                    # the row tuple when 'class' was absent, persisting the
                    # previous row's data instead of the current one's.
                    cursor.execute(insert_sql,
                                   (usagetype, finalt, '', '', '', '', '', pkg, tipes, classy, sourced, fullatti_str))
                    db.commit()

    # Reporting query: decode epoch-ms timestamps and map known event type
    # codes to their names; empty strings pass through unchanged.
    cursor.execute('''
    select 
    usage_type,
    case lastime WHEN '' THEN ''
     ELSE datetime(lastime/1000, 'UNIXEPOCH')
    end as lasttimeactive,
    timeactive as time_Active_in_msecs,
    timeactive/1000 as timeactive_in_secs,
    case last_time_service_used  WHEN '' THEN ''
     ELSE datetime(last_time_service_used/1000, 'UNIXEPOCH')
    end last_time_service_used,
    case last_time_visible  WHEN '' THEN ''
     ELSE datetime(last_time_visible/1000, 'UNIXEPOCH') 
    end last_time_visible,
    total_time_visible,
    app_launch_count,
    package,
    CASE types
         WHEN '1' THEN 'MOVE_TO_FOREGROUND'
         WHEN '2' THEN 'MOVE_TO_BACKGROUND'
         WHEN '5' THEN 'CONFIGURATION_CHANGE'
         WHEN '7' THEN 'USER_INTERACTION'
         WHEN '8' THEN 'SHORTCUT_INVOCATION'
         ELSE types
    END types,
    classs,
    source,
    fullatt
    from data
    order by lasttimeactive DESC
    ''')
    all_rows = cursor.fetchall()

    report = ArtifactHtmlReport('Usagestats')
    report.start_artifact_report(report_folder, f'UsageStats_{uid}')
    report.add_script()
    data_headers = ('Usage Type', 'Last Time Active', 'Time Active in Msecs', 'Time Active in Secs', 
                    'Last Time Service Used', 'Last Time Visible', 'Total Time Visible', 'App Launch Count', 
                    'Package', 'Types', 'Class', 'Source')

    # Stringify every column (drop the trailing 'fullatt' column, which is
    # queried for the db but not shown in the report).
    data_list = [tuple(str(col) for col in row[:12]) for row in all_rows]
    processed = len(data_list)

    report.write_artifact_data_table(data_headers, data_list, folder)
    report.end_artifact_report()

    tsvname = f'usagestats'
    tsv(report_folder, data_headers, data_list, tsvname)

    logfunc(f'Records processed for user {uid}: {processed}')
    db.close()
# --- Esempio n. 15 (Example 15 — scraped-snippet separator) ---
def get_appleMapsGroup(files_found, report_folder, seeker):
    """Report the coordinates stored in the Apple Maps group plist.

    Loads the first file in *files_found*, decodes the 'MapsActivity'
    protobuf blob with blackboxprotobuf, and reports the Latitude/Longitude
    pair found at field 1.5 of that message.
    """
    file_found = str(files_found[0])

    with open(file_found, 'rb') as f:
        deserialized_plist = plistlib.load(f)

    # blackboxprotobuf typedef: field '1' is a message whose field '5'
    # carries the coordinates as doubles; other fields are left unnamed.
    types = {
        '1': {
            'type': 'message',
            'message_typedef': {
                '1': {
                    'type': 'int',
                    'name': ''
                },
                '2': {
                    'type': 'int',
                    'name': ''
                },
                '5': {
                    'type': 'message',
                    'message_typedef': {
                        '1': {
                            'type': 'double',
                            'name': 'Latitude'
                        },
                        '2': {
                            'type': 'double',
                            'name': 'Longitude'
                        },
                        '3': {
                            'type': 'double',
                            'name': ''
                        },
                        '4': {
                            'type': 'fixed64',
                            'name': ''
                        },
                        '5': {
                            'type': 'double',
                            'name': ''
                        }
                    },
                    'name': ''
                },
                '7': {
                    'type': 'int',
                    'name': ''
                }
            },
            'name': ''
        }
    }
    try:
        internal_deserialized_plist, di = blackboxprotobuf.decode_message(
            deserialized_plist['MapsActivity'], types)

        latitude = internal_deserialized_plist['1']['5']['Latitude']
        longitude = internal_deserialized_plist['1']['5']['Longitude']

        data_list = [(latitude, longitude)]
        report = ArtifactHtmlReport('Apple Maps Group')
        report.start_artifact_report(report_folder, 'Apple Maps Group')
        report.add_script()
        data_headers = ('Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Apple Maps Group'
        tsv(report_folder, data_headers, data_list, tsvname)
    except Exception:
        # Missing 'MapsActivity' key or an unexpected protobuf layout is
        # treated as "no data". (Was a bare except; narrowed so Ctrl-C and
        # SystemExit are no longer swallowed.)
        logfunc('No data in Apple Maps Group')
# --- Esempio n. 16 (Example 16 — scraped-snippet separator) ---
def get_cloudkitServerRecordData(file_found, report_folder, seeker):
    """Report CloudKit server record metadata from the Notes database.

    Reads every non-null ZSERVERRECORDDATA blob from ZICCLOUDSYNCINGOBJECT,
    dumps each raw bplist next to the report for manual review, then
    deserializes it and extracts record/creator/modifier metadata.
    """
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select z_pk, zserverrecorddata 
    from
    ziccloudsyncingobject
    where
    zserverrecorddata not null
    ''')

    note_data = []
    all_rows = cursor.fetchall()
    if len(all_rows) > 0:

        for row in all_rows:

            # Keep a copy of the raw bplist alongside the report.
            # Context manager guarantees the handle is closed even if the
            # write fails (was a manual open/write/close).
            filename = os.path.join(report_folder, 'zserverrecorddata_' + str(row[0]) + '.bplist')
            with open(filename, "wb") as output_file:
                output_file.write(row[1])

            deserialized_plist = nd.deserialize_plist(io.BytesIO(row[1]))
            creator_id = ''
            last_modified_id = ''
            creation_date = ''
            last_modified_date = ''
            last_modified_device = ''
            record_type = ''
            record_id = ''
            # The deserialized plist is a sequence of single-key containers;
            # pick out the fields of interest, leaving the rest blank.
            for item in deserialized_plist:
                if 'RecordCtime' in item:
                    creation_date = item['RecordCtime']
                elif 'RecordMtime' in item:
                    last_modified_date = item['RecordMtime']
                elif 'LastModifiedUserRecordID' in item:
                    last_modified_id = item['LastModifiedUserRecordID']['RecordName']
                elif 'CreatorUserRecordID' in item:
                    creator_id = item['CreatorUserRecordID']['RecordName']
                elif 'ModifiedByDevice' in item:
                    last_modified_device = item['ModifiedByDevice']
                elif 'RecordType' in item:
                    record_type = item['RecordType']
                elif 'RecordID' in item:
                    record_id = item['RecordID']['RecordName']

            note_data.append([record_id, record_type, creation_date, creator_id,
                              last_modified_date, last_modified_id, last_modified_device])

        # BUGFIX: corrected table-name typo (ZICCLOUDSYYNCINGOBJECT) in the
        # analyst-facing guidance; the queried table is ZICCLOUDSYNCINGOBJECT.
        description = 'CloudKit Note Sharing - Notes information shared via CloudKit. Look up the Record ID in the ZICCLOUDSYNCINGOBJECT.ZIDENTIFIER column. '
        report = ArtifactHtmlReport('Note Sharing')
        report.start_artifact_report(report_folder, 'Note Sharing', description)
        report.add_script()
        note_headers = ('Record ID','Record Type','Creation Date','Creator ID','Modified Date','Modifier ID','Modifier Device')     
        report.write_artifact_data_table(note_headers, note_data, file_found)
        report.end_artifact_report()
        
        tsvname = 'Cloudkit Note Sharing'
        tsv(report_folder, note_headers, note_data, tsvname)
    else:
        logfunc('No Cloudkit - Cloudkit Note Sharing data available')

    db.close()
# --- Esempio n. 17 (Example 17 — scraped-snippet separator) ---
def get_powerlogPaireddevconf(files_found, report_folder, seeker):
    """Report paired-device configuration events from the powerlog database.

    On iOS 10+ the table also carries HWMODEL and PAIRINGID columns, so the
    query and headers differ by version; the reporting path is shared.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    cursor = db.cursor()
    # The following SQL queries are taken from https://github.com/mac4n6/APOLLO/blob/master/modules/powerlog_paired_device_config.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    if version.parse(iOSversion) >= version.parse("10"):
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            BUILD,
            DEVICE,
            HWMODEL,
            PAIRINGID AS "PAIRING ID",
            ID AS "PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG TABLE ID" 
        FROM
            PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG
        ''')
        data_headers = ('Timestamp','Build','Device','HW Model','Pairing ID','PairedDeviceConfig Table ID' )
    else:
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            BUILD,
            DEVICE,
            ID AS "PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG TABLE ID" 
        FROM
            PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG
        ''')
        data_headers = ('Timestamp','Build','Device','PairedDeviceConfig Table ID' )

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        # Rows already match the headers selected above, column for column.
        data_list = [tuple(row) for row in all_rows]

        report = ArtifactHtmlReport('Powerlog Paired Device Configuration')
        report.start_artifact_report(report_folder, 'Paired Device Configuration')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Powerlog Paired Device Conf'
        tsv(report_folder, data_headers, data_list, tsvname)

        # BUGFIX: the timeline export was previously produced only in the
        # pre-iOS-10 branch; emit it for both versions consistently.
        tlactivity = 'Powerlog Paired Device Configuration'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
# --- Esempio n. 18 (Example 18 — scraped-snippet separator) ---
def get_appicons(files_found, report_folder, seeker, wrap_text):
    """Report app icons from the launcher's app_icons.db as inline images.

    Groups icon rows by package, selects each package's main icon (the
    component named '<package>.' or starting with it) and renders remaining
    component icons as secondary images. Stops after the first database
    that yields data. (Assumes the project 'App' class exposes package,
    name, main_icon and an icons dict — TODO confirm against its definition.)
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(
                file_found):  # skip folders (there shouldn't be any)
            continue
        elif not os.path.basename(
                file_found) == 'app_icons.db':  # skip -journal and other files
            continue

        db = open_sqlite_db_readonly(file_found)
        db.row_factory = sqlite3.Row  # For fetching columns by name

        cursor = db.cursor()
        cursor.execute(
            '''SELECT componentName, profileid, lastUpdated, version, icon, label FROM icons ORDER BY componentName'''
        )
        # version appears to be the last part of version string for some, ie, if ver=2.3.4.91, then db only stores '91'
        # On others it changes 1.3.1 to '131'

        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('App Icons')
            report.start_artifact_report(report_folder, 'App Icons')
            report.add_script()
            data_headers = ('App name', 'Package name', 'Main icon', 'Icons')
            data_list = []

            # Group rows into one App per package; rows are ordered by
            # componentName, so rows of the same package are contiguous.
            app = None
            apps = []
            last_package = None
            for row in all_rows:
                componentName = row['componentName']
                if componentName.find('/') > 0:
                    package, component = componentName.split('/', 1)
                else:
                    logfunc(
                        f'Warning: Different format detected, no component name found , only package name seen! component = {componentName}'
                    )
                    package = componentName
                    component = ''
                if package != last_package:
                    # new package name seen
                    last_package = package
                    app = App(package)
                    apps.append(app)
                app.icons[component] = (row['label'], row['icon'])

            # Pick each app's main label/icon out of its component icons.
            for app in apps:
                if len(app.icons) == 1:
                    app.name, app.main_icon = list(app.icons.items())[0][1]
                    app.icons = {}
                else:
                    desired_key = app.package + '.'  # Look for component = 'com.xyz/com.zyz.'
                    if desired_key in app.icons:
                        app.name, app.main_icon = app.icons.get(desired_key)
                        del app.icons[desired_key]
                        continue
                    # If not found yet, look for component = 'com.xyz/com.zyz.*'
                    key_to_delete = None
                    for key in app.icons:
                        if key.startswith(desired_key):
                            app.name, app.main_icon = app.icons.get(key)
                            key_to_delete = key
                            break
                    if key_to_delete:
                        # BUGFIX: delete the recorded key rather than the loop
                        # variable (same value after break, but fragile).
                        del app.icons[key_to_delete]

            # Render icons as inline base64 <img> tags for the HTML report.
            for app in apps:
                main_icon_html = ''
                other_icons_html = ''
                if app.main_icon:
                    icon_data = base64.b64encode(app.main_icon).decode("utf-8")
                    main_icon_html = f'<img src="data:image/png;base64,{icon_data}">'
                for k, v in app.icons.items():
                    if v[1]:  # sometimes icon is NULL in db
                        icon_data = base64.b64encode(v[1]).decode("utf-8")
                        other_icons_html += f'<img title="{v[0]}" src="data:image/png;base64,{icon_data}"> '
                data_list.append((escape(app.name), escape(app.package),
                                  main_icon_html, other_icons_html))
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()
            return
# --- Esempio n. 19 (Example 19 — scraped-snippet separator) ---
def get_screentimeGenerichour(files_found, report_folder, seeker):
    """Report Screen Time generic per-hour usage from the RMAdminStore db.

    Requires iOS 12+. iOS 13+ additionally exposes the device platform and
    the user's Alt DSID, so the query and headers differ by version; the
    reporting path is shared.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for Screentime App by Hour on iOS " +
                iOSversion)
        db.close()  # BUGFIX: connection was leaked on this early return
        return ()

    cursor = db.cursor()
    if version.parse(iOSversion) >= version.parse("13"):
        cursor.execute('''
        SELECT
            DISTINCT
            DATETIME(ZUSAGEBLOCK.ZSTARTDATE+978307200,'UNIXEPOCH') AS 'HOUR',
            ZUSAGEBLOCK.ZSCREENTIMEINSECONDS AS 'SCREENTIME (SECONDS)',
            ZUSAGEBLOCK.ZSCREENTIMEINSECONDS/60.00 AS 'SCREENTIME (MINUTES)',	
            ZCOREUSER.ZGIVENNAME AS 'GIVEN NAME',
            ZCOREUSER.ZFAMILYNAME AS 'FAMILY NAME',
            ZCOREDEVICE.ZNAME AS 'NAME',
            CASE ZCOREDEVICE.ZPLATFORM
                WHEN 0 THEN 'Unknown'
                WHEN 1 THEN 'macOS'
                WHEN 2 THEN 'iOS'
                WHEN 4 THEN 'Apple Watch'
                ELSE ZPLATFORM
            END AS PLATFORM,
            ZCOREDEVICE.ZIDENTIFIER AS 'DEVICE ID',
            ZCOREDEVICE.ZLOCALUSERDEVICESTATE AS 'LOCAL USER DEVICE STATE',
            DATETIME(ZUSAGEBLOCK.ZLONGESTSESSIONSTARTDATE+978307200,'UNIXEPOCH') AS 'LONGEST SESSION START',
            DATETIME(ZUSAGEBLOCK.ZLONGESTSESSIONENDDATE+978307200,'UNIXEPOCH') AS 'LONGEST SESSION END',
            DATETIME(ZUSAGEBLOCK.ZLASTEVENTDATE+978307200,'UNIXEPOCH') AS 'LAST EVENT DATE',
            (ZLONGESTSESSIONENDDATE-ZLONGESTSESSIONSTARTDATE) AS 'LONGEST SESSION TIME (SECONDS)',
            (ZLONGESTSESSIONENDDATE-ZLONGESTSESSIONSTARTDATE)/60.00 AS 'LONGEST SESSION TIME (MINUTES)',
            ZCOREUSER.ZFAMILYMEMBERTYPE AS 'FAMILY MEMBER TYPE',
            ZCOREUSER.ZAPPLEID AS 'APPLE ID',
            ZCOREUSER.ZDSID AS 'DSID',
            ZCOREUSER.ZALTDSID AS 'ALT DSID'
        FROM ZUSAGETIMEDITEM
        LEFT JOIN ZUSAGECATEGORY ON ZUSAGECATEGORY.Z_PK == ZUSAGETIMEDITEM.ZCATEGORY
        LEFT JOIN ZUSAGEBLOCK ON ZUSAGECATEGORY.ZBLOCK == ZUSAGEBLOCK.Z_PK
        LEFT JOIN ZUSAGE ON ZUSAGEBLOCK.ZUSAGE == ZUSAGE.Z_PK
        LEFT JOIN ZCOREUSER ON ZUSAGE.ZUSER == ZCOREUSER.Z_PK
        LEFT JOIN ZCOREDEVICE ON ZUSAGE.ZDEVICE == ZCOREDEVICE.Z_PK
        ''')
        # 'Last Event Date' fixed (was 'Last Event Data'); matches SQL alias.
        data_headers = ('Hour', 'Screentime in Seconds',
                        'Screentime in Minutes', 'Given Name',
                        'Family Name', 'Name', 'Platform', 'Device ID',
                        'Local User Device State', 'Longest Session Start',
                        'Longest Session End', 'Last Event Date',
                        'Longest Session Time in Seconds',
                        'Longest Session Time in Minutes',
                        'Family Member Type', 'Apple ID', 'DSID',
                        'Alt DSID')
        report_title = 'Generic by Hour'
    else:
        cursor.execute('''
        SELECT
            DISTINCT
            DATETIME(ZUSAGEBLOCK.ZSTARTDATE+978307200,'UNIXEPOCH') AS 'HOUR',
            ZUSAGEBLOCK.ZSCREENTIMEINSECONDS AS 'SCREENTIME (SECONDS)',
            ZUSAGEBLOCK.ZSCREENTIMEINSECONDS/60.00 AS 'SCREENTIME (MINUTES)',	
            ZCOREUSER.ZGIVENNAME AS 'GIVEN NAME',
            ZCOREUSER.ZFAMILYNAME AS 'FAMILY NAME',
            ZCOREDEVICE.ZNAME AS 'NAME',
            ZCOREDEVICE.ZIDENTIFIER AS 'DEVICE ID',
            ZCOREDEVICE.ZLOCALUSERDEVICESTATE AS 'LOCAL USER DEVICE STATE',
            DATETIME(ZUSAGEBLOCK.ZLONGESTSESSIONSTARTDATE+978307200,'UNIXEPOCH') AS 'LONGEST SESSION START',
            DATETIME(ZUSAGEBLOCK.ZLONGESTSESSIONENDDATE+978307200,'UNIXEPOCH') AS 'LONGEST SESSION END',
            DATETIME(ZUSAGEBLOCK.ZLASTEVENTDATE+978307200,'UNIXEPOCH') AS 'LAST EVENT DATE',
            (ZLONGESTSESSIONENDDATE-ZLONGESTSESSIONSTARTDATE) AS 'LONGEST SESSION TIME (SECONDS)',
            (ZLONGESTSESSIONENDDATE-ZLONGESTSESSIONSTARTDATE)/60.00 AS 'LONGEST SESSION TIME (MINUTES)',
            ZCOREUSER.ZFAMILYMEMBERTYPE AS 'FAMILY MEMBER TYPE',
            ZCOREUSER.ZAPPLEID AS 'APPLE ID',
            ZCOREUSER.ZDSID AS 'DSID'
        FROM ZUSAGETIMEDITEM
        LEFT JOIN ZUSAGECATEGORY ON ZUSAGECATEGORY.Z_PK == ZUSAGETIMEDITEM.ZCATEGORY
        LEFT JOIN ZUSAGEBLOCK ON ZUSAGECATEGORY.ZBLOCK == ZUSAGEBLOCK.Z_PK
        LEFT JOIN ZUSAGE ON ZUSAGEBLOCK.ZUSAGE == ZUSAGE.Z_PK
        LEFT JOIN ZCOREUSER ON ZUSAGE.ZUSER == ZCOREUSER.Z_PK
        LEFT JOIN ZCOREDEVICE ON ZUSAGE.ZDEVICE == ZCOREDEVICE.Z_PK
        ''')
        data_headers = ('Hour', 'Screentime in Seconds',
                        'Screentime in Minutes', 'Given Name',
                        'Family Name', 'Name', 'Device ID',
                        'Local User Device State', 'Longest Session Start',
                        'Longest Session End', 'Last Event Date',
                        'Longest Session Time in Seconds',
                        'Longest Session Time in Minutes',
                        'Family Member Type', 'Apple ID', 'DSID')
        report_title = 'Generic Hour'

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        # Each SELECT returns exactly the columns named in its headers, so a
        # straight tuple copy replaces the fixed-index row unpacking.
        data_list = [tuple(row) for row in all_rows]

        report = ArtifactHtmlReport('Screentime Generic by Hour')
        report.start_artifact_report(report_folder, report_title)
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Screentime Generic by Hour'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        # BUGFIX: corrected typo 'foe' -> 'for' in the log message.
        logfunc('No data available in table for Screentime Generic by Hour')

    db.close()
    return
# --- Esempio n. 20 (Example 20 — scraped-snippet separator) ---
def get_bluetoothConnections(files_found, report_folder, seeker, wrap_text):
    """Parse an Android Bluetooth config file for paired devices and
    adapter information, writing HTML, TSV and timeline reports.

    The file consists of "Key = Value" lines. Device sections begin with a
    bracketed MAC-address header such as [aa:bb:cc:dd:ee:ff]; every line
    before the first header describes the local adapter.
    """
    data_list = []
    file_found = str(files_found[0])

    # Fields accumulated for the device section currently being parsed.
    name_value = ''
    timestamp_value = ''
    linkkey_value = ''
    macaddrf = ''
    first_round = True

    # Hoisted out of the loops: matches a [aa:bb:cc:dd:ee:ff] section header.
    mac_pattern = re.compile(r'(\[[0-9a-f]{2}(?::[0-9a-f]{2}){5}\])',
                             re.IGNORECASE)

    with open(file_found, "r") as f:
        for line in f:
            macaddr = mac_pattern.findall(line)
            if macaddr:
                # A new device section starts: flush the previous one
                # (there is nothing to flush before the first header).
                if first_round:
                    first_round = False
                else:
                    data_list.append((timestamp_value, name_value,
                                      macaddrf, linkkey_value))
                    name_value = ''
                    timestamp_value = ''
                    linkkey_value = ''
                macaddrf = macaddr[0].strip('[]').upper()

            splits = line.split(' = ')
            if len(splits) < 2:
                continue  # not a "Key = Value" line

            if splits[0] == 'Name':
                name_value = splits[1].strip()
            elif splits[0] == 'Timestamp':
                timestamp_value = splits[1].strip()
                try:
                    # Value is a unix epoch in seconds; keep the raw string
                    # if it does not parse as an integer.
                    timestamp_value = datetime.datetime.fromtimestamp(
                        int(timestamp_value)).strftime('%Y-%m-%d %H:%M:%S')
                except ValueError:
                    pass
            elif splits[0] == 'LinkKey':
                linkkey_value = splits[1].strip()

    # Flush the final device section; skip when no MAC header was ever
    # seen so an empty/foreign file does not yield a blank row.
    if macaddrf:
        data_list.append((timestamp_value, name_value, macaddrf,
                          linkkey_value))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Bluetooth Connections')
        report.start_artifact_report(report_folder, 'Bluetooth Connections')
        report.add_script()
        data_headers = ('First Connected Timestamp', 'Device Name',
                        'MAC Address', 'Link Key')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Bluetooth Connections'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Bluetooth Connections'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Bluetooth Connections data available')

    # Second pass: collect adapter-level "Key = Value" pairs, stopping at
    # the first device (MAC) section header.
    data_list = []
    with open(file_found, "r") as f:
        for line in f:
            if mac_pattern.findall(line):
                break
            if ' = ' in line:
                splits = line.split(' = ')
                data_list.append((splits[0], splits[1].strip()))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Bluetooth Adapter Information')
        report.start_artifact_report(report_folder,
                                     'Bluetooth Adapter Information')
        report.add_script()
        data_headers = ('Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Bluetooth Adapter Information'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Bluetooth Adapter Information data available')
Esempio n. 21
0
def get_knowCappactsafari(files_found, report_folder, seeker):
    """Report Safari application activity from KnowledgeC.db
    (/app/activity stream, filtered to the Safari bundle IDs).

    iOS 12+ databases expose extra metadata columns (content description,
    user-activity string, activity UUID, object UUID), so the query and
    the report headers differ by iOS version.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    cursor = db.cursor()
    if version.parse(iOSversion) >= version.parse("12"):
        cursor.execute('''
        SELECT
            DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
            DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
            ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__CONTENTDESCRIPTION AS "CONTENT DESCRIPTION",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYREQUIREDSTRING AS "USER ACTIVITY REQUIRED STRING",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMIDENTIFIER AS "ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYUUID AS "ACTIVITY UUID",
            ZSOURCE.ZSOURCEID AS "SOURCE ID",
            CASE ZOBJECT.ZSTARTDAYOFWEEK 
              WHEN "1" THEN "Sunday"
              WHEN "2" THEN "Monday"
              WHEN "3" THEN "Tuesday"
              WHEN "4" THEN "Wednesday"
              WHEN "5" THEN "Thursday"
              WHEN "6" THEN "Friday"
              WHEN "7" THEN "Saturday"
            END "DAY OF WEEK",
            ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
            DATETIME(ZOBJECT.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ENTRY CREATION",
            DATETIME(ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__EXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION DATE",
            ZOBJECT.ZUUID AS "UUID", 
            ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
          FROM
             ZOBJECT 
             LEFT JOIN
                ZSTRUCTUREDMETADATA 
                ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
             LEFT JOIN
                ZSOURCE 
                ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
          WHERE
             ZSTREAMNAME IS "/app/activity"
             AND ("BUNDLE ID" = "com.apple.mobilesafari" OR "BUNDLE ID" = "com.apple.Safari")
        ''')
    else:
        cursor.execute('''
        SELECT
            DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
            DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
            ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMIDENTIFIER AS "ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
            ZSOURCE.ZSOURCEID AS "SOURCE ID",
            CASE ZOBJECT.ZSTARTDAYOFWEEK 
              WHEN "1" THEN "Sunday"
              WHEN "2" THEN "Monday"
              WHEN "3" THEN "Tuesday"
              WHEN "4" THEN "Wednesday"
              WHEN "5" THEN "Thursday"
              WHEN "6" THEN "Friday"
              WHEN "7" THEN "Saturday"
            END "DAY OF WEEK",
            ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
            DATETIME(ZOBJECT.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ENTRY CREATION",
            DATETIME(ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__EXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION DATE",
            ZOBJECT.ZUUID AS "UUID", 
            ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
          FROM
             ZOBJECT 
             LEFT JOIN
                ZSTRUCTUREDMETADATA 
                ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
             LEFT JOIN
                ZSOURCE 
                ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
          WHERE
             ZSTREAMNAME IS "/app/activity"
             AND ("BUNDLE ID" = "com.apple.mobilesafari" OR "BUNDLE ID" = "com.apple.Safari")
            ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        # sqlite3 already yields tuples in the SELECT column order; no need
        # to repack element by element.
        data_list = [tuple(row) for row in all_rows]

        # Headers track the version-specific column sets above
        # (17 columns on iOS 12+, 14 columns before).
        if version.parse(iOSversion) >= version.parse("12"):
            data_headers = ('Start', 'End', 'Bundle ID', 'Activity Type',
                            'Content Description', 'Content URL',
                            'User Activity Required String', 'ID',
                            'Unique ID', 'Activity UUID', 'Source ID',
                            'Day of Week', 'GMT Offset', 'Entry Creation',
                            'Expiration Date', 'UUID', 'ZOBJECT Table ID')
        else:
            data_headers = ('Start', 'End', 'Bundle ID', 'Activity Type',
                            'Content URL', 'ID', 'Unique ID', 'Source ID',
                            'Day of Week', 'GMT Offset', 'Entry Creation',
                            'Expiration Date', 'UUID', 'ZOBJECT Table ID')

        report = ArtifactHtmlReport('KnowledgeC Application Activity Safari')
        report.start_artifact_report(report_folder,
                                     'Application Activity Safari')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Application Activity Safari'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Application Activity Safari'
        # Pass data_headers (missing in the old code) so the timeline
        # writer receives the same argument set as every other artifact.
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Esempio n. 22
0
def get_chromeLoginData(files_found, report_folder, seeker, wrap_text):
    """Extract saved credentials from Chromium-based browsers' 'Login Data'
    databases and emit an HTML report, TSV output and timeline entries,
    one set per database found."""
    for found in files_found:
        db_path = str(found)
        # Only process the actual 'Login Data' db; skip -journal and others.
        if os.path.basename(db_path) != 'Login Data':
            continue

        browser_name = get_browser_name(db_path)
        if db_path.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif db_path.find('.magisk') >= 0 and db_path.find('mirror') >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??
            continue

        db = open_sqlite_db_readonly(db_path)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        username_value,
        password_value,
        CASE date_created 
            WHEN "0" THEN "" 
            ELSE datetime(date_created / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
            END AS "date_created_win_epoch", 
        CASE date_created WHEN "0" THEN "" 
            ELSE datetime(date_created / 1000000 + (strftime('%s', '1970-01-01')), "unixepoch")
            END AS "date_created_unix_epoch",
        origin_url,
        blacklisted_by_user
        FROM logins
        ''')

        rows = cursor.fetchall()
        if rows:
            report = ArtifactHtmlReport(f'{browser_name} Login Data')
            # Pick the next unused report file name so a report from one
            # database does not overwrite a report from another.
            report_path = os.path.join(report_folder,
                                       f'{browser_name} Login Data.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # drop the .temphtml suffix
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Created Time', 'Username', 'Password',
                            'Origin URL', 'Blacklisted by User',
                            'Browser Name')
            data_list = []
            for row in rows:
                # Passwords are stored encrypted; decode only when present.
                plaintext = ''
                if row[1]:
                    plaintext = decrypt(row[1]).decode("utf-8", 'replace')
                created = get_valid_date(row[2], row[3])
                data_list.append((created, row[0], plaintext, row[4],
                                  row[5], browser_name))

            report.write_artifact_data_table(data_headers, data_list,
                                             db_path)
            report.end_artifact_report()

            tsvname = f'{browser_name} login data'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} Login Data'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} Login Data available')

        db.close()
Esempio n. 23
0
def get_skype(files_found, report_folder, seeker, wrap_text):
    """Parse Skype Android databases ('live:<id>' files) and report call
    logs, messages and contacts as HTML/TSV/timeline output.

    NOTE: 'particiapnt' in the queries below is the actual (misspelled)
    table name in the Skype schema — do not "fix" it.
    """
    user_id = None
    source_file = ''
    for file_found in files_found:

        file_name = str(file_found)
        if (('live' in file_name.lower())
                and ('db-journal' not in file_name.lower())):
            skype_db = str(file_found)
            # File name has a format of live: which does not write out to a file system correctly
            # so this will fix it to the original name from what is actually written out.
            (head,
             tail) = os.path.split(file_found.replace(seeker.directory, ''))
            source_file = os.path.join(head, "live:" + tail[5:])
        else:
            continue

        db = open_sqlite_db_readonly(skype_db)
        cursor = db.cursor()

        try:
            cursor.execute('''
                         SELECT entry_id,
                      CASE
                        WHEN Ifnull(first_name, "") == "" AND Ifnull(last_name, "") == "" THEN entry_id
                        WHEN first_name is NULL THEN replace(last_name, ",", "")
                        WHEN last_name is NULL THEN replace(first_name, ",", "")
                        ELSE replace(first_name, ",", "") || " " || replace(last_name, ",", "")
                      END AS name
               FROM user 
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except sqlite3.Error:
            # Table/schema variant without a 'user' table.
            usageentries = 0

        if usageentries > 0:
            # First row appears to be the local account as an
            # (entry_id, name) tuple — TODO confirm whether a caller
            # expects the id only.
            user_id = all_rows[0]

        try:
            cursor.execute('''
                    SELECT 
                           contact_book_w_groups.conversation_id, 
                           contact_book_w_groups.participant_ids, 
                           messages.time/1000 as start_date, 
                           messages.time/1000 + messages.duration as end_date, 
                           case messages.is_sender_me when 0 then "Incoming" else "Outgoing"
                           end is_sender_me, 
                           messages.person_id AS sender_id 
                    FROM   (SELECT conversation_id, 
                                   Group_concat(person_id) AS participant_ids 
                            FROM   particiapnt 
                            GROUP  BY conversation_id 
                            UNION 
                            SELECT entry_id AS conversation_id, 
                                   NULL 
                            FROM   person) AS contact_book_w_groups 
                           join chatitem AS messages 
                             ON messages.conversation_link = contact_book_w_groups.conversation_id 
                    WHERE  message_type == 3
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except sqlite3.Error:
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Skype - Call Logs')
            report.start_artifact_report(report_folder, 'Skype - Call Logs')
            report.add_script()
            data_headers = (
                'Start Time', 'End Time', 'From ID', 'To ID', 'Call Direction'
            )  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
            data_list = []
            for row in all_rows:
                to_id = None
                if row[4] == "Outgoing":
                    # participant_ids (row[1]) is NULL for conversations
                    # sourced from the person-table half of the UNION; fall
                    # back to the conversation id instead of crashing.
                    if row[1] is not None and ',' in row[1]:
                        to_id = row[1]
                    else:
                        to_id = row[0]
                starttime = datetime.datetime.fromtimestamp(int(
                    row[2])).strftime('%Y-%m-%d %H:%M:%S')
                endtime = datetime.datetime.fromtimestamp(int(
                    row[3])).strftime('%Y-%m-%d %H:%M:%S')
                data_list.append((starttime, endtime, row[5], to_id, row[4]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Skype - Call Logs'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)

            tlactivity = f'Skype - Call Logs'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Skype Call Log available')

        try:
            cursor.execute('''
		    SELECT contact_book_w_groups.conversation_id,
                           contact_book_w_groups.participant_ids,
                           messages.time/1000,
                           messages.content,
                           messages.device_gallery_path,
                           case messages.is_sender_me when 0 then "Incoming" else "Outgoing"
                           end is_sender_me, 
                           messages.person_id
                           FROM   (SELECT conversation_id,
                                   Group_concat(person_id) AS participant_ids
                            FROM   particiapnt
                            GROUP  BY conversation_id
                            UNION
                            SELECT entry_id as conversation_id,
                                   NULL
                            FROM   person) AS contact_book_w_groups
                           JOIN chatitem AS messages
                             ON messages.conversation_link = contact_book_w_groups.conversation_id
                    WHERE message_type != 3
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except sqlite3.Error:
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Skype - Messages')
            report.start_artifact_report(report_folder, 'Skype - Messages')
            report.add_script()
            data_headers = (
                'Send Time', 'Thread ID', 'Content', 'Direction', 'From ID',
                'To ID', 'Attachment'
            )  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
            data_list = []
            for row in all_rows:
                # A NULL participant list means a 1:1 chat: the
                # conversation id doubles as the thread id.
                thread_id = None
                if row[1] is None:
                    thread_id = row[0]
                to_id = None
                if row[5] == "Outgoing":
                    if row[1] is None:
                        to_id = None
                    elif ',' in row[1]:
                        to_id = row[1]
                    else:
                        to_id = row[0]
                sendtime = datetime.datetime.fromtimestamp(int(
                    row[2])).strftime('%Y-%m-%d %H:%M:%S')

                data_list.append((sendtime, thread_id, row[3], row[5], row[6],
                                  to_id, row[4]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Skype - Messages'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)

            tlactivity = f'Skype - Messages'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Skype messages data available')

        try:
            cursor.execute('''
                    SELECT entry_id, 
                           CASE
                             WHEN Ifnull(first_name, "") == "" AND Ifnull(last_name, "") == "" THEN entry_id
                             WHEN first_name is NULL THEN replace(last_name, ",", "")
                             WHEN last_name is NULL THEN replace(first_name, ",", "")
                             ELSE replace(first_name, ",", "") || " " || replace(last_name, ",", "")
                           END AS name
                    FROM   person 
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except sqlite3.Error:
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Skype - Contacts')
            report.start_artifact_report(report_folder, 'Skype - Contacts')
            report.add_script()
            data_headers = (
                'Entry ID', 'Name'
            )  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Skype - Contacts'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)

        else:
            logfunc('No Skype Contacts found')

        # Bug fix: was `db.close` (missing parentheses), which never
        # actually closed the connection.
        db.close()

    return
Esempio n. 24
0
def get_knowCorientation(files_found, report_folder, seeker):
	"""Report screen-orientation events from KnowledgeC.db
	(/display/orientation stream) as HTML/TSV/timeline output.

	Bails out on iOS versions below 11, which this stream does not cover.
	"""
	iOSversion = scripts.artifacts.artGlobals.versionf
	if version.parse(iOSversion) < version.parse("11"):
		logfunc("Unsupported version for KnowledgC Screen Orientation on iOS " + iOSversion)
		return ()

	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	cursor = db.cursor()

	# Timestamps are Apple/Cocoa epoch (2001-01-01); adding 978307200
	# converts them to unix epoch for DATETIME().
	cursor.execute(
	"""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			CASE ZOBJECT.ZVALUEINTEGER
				WHEN '0' THEN 'PORTRAIT' 
				WHEN '1' THEN 'LANDSCAPE' 
			 ELSE ZOBJECT.ZVALUEINTEGER
			END "ORIENTATION",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		  (ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME is "/display/orientation"  	
	""")

	all_rows = cursor.fetchall()
	usageentries = len(all_rows)
	if usageentries > 0:
		data_list = []    
		# Each row already matches the report column order (10 columns).
		for row in all_rows:
			data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9]))

		description = ''
		report = ArtifactHtmlReport('KnowledgeC Screen Orientation')
		report.start_artifact_report(report_folder, 'Screen Orientation', description)
		report.add_script()
		data_headers = ('Start','End','Orientation','Usage in Seconds','Usage in Minutes','Day of Week','GMT Offset','Entry Creation','UUID','Zobject Table ID')     
		report.write_artifact_data_table(data_headers, data_list, file_found)
		report.end_artifact_report()
		
		tsvname = 'KnowledgeC Screen Orientation'
		tsv(report_folder, data_headers, data_list, tsvname)
		
		tlactivity = 'KnowledgeC Screen Orientation'
		timeline(report_folder, tlactivity, data_list, data_headers)
	else:
		logfunc('No data available in table')

	db.close()
	return      
	
Esempio n. 25
0
def get_knowCinferredmotion(files_found, report_folder, seeker):
    """Report inferred-motion entries from KnowledgeC.db
    (/inferred/motion stream) as HTML/TSV/timeline output.

    Only supported on iOS 12 and newer; earlier versions bail out early.
    """
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC Inferred Motion on iOS " +
                iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_device_inferred_motion.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	  SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUEINTEGER AS "VALUE",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/inferred/motion" 	
	""")

    rows = cursor.fetchall()
    if not rows:
        logfunc('No data available in table')
        db.close()
        return

    # Rows already match the report column order (10 columns).
    data_list = [tuple(row) for row in rows]

    report = ArtifactHtmlReport('KnowledgeC Inferred Motion')
    report.start_artifact_report(report_folder, 'Inferred Motion', '')
    report.add_script()
    data_headers = ('Start', 'End', 'Value', 'Usage in Seconds',
                    'Usage in Minutes', 'Day of Week', 'GMT Offset',
                    'Entry Creation', 'UUID', 'Zobject Table ID')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, 'KnowledgeC Inferred Motion')
    timeline(report_folder, 'KnowledgeC Inferred Motion', data_list,
             data_headers)

    db.close()
    return
Esempio n. 26
0
def get_knowCinstall(files_found, report_folder, seeker):
    """Report app-install events from KnowledgeC.db (/app/install stream)
    as HTML/TSV/timeline output.

    iOS 12+ databases carry extra columns (app category/name, UUID), so the
    query and the report headers are selected by version.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    iOSversion = scripts.artifacts.artGlobals.versionf
    new_schema = version.parse(iOSversion) >= version.parse("12")

    if new_schema:
        cursor.execute('''
		SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
			ZSTRUCTUREDMETADATA .Z_DKAPPINSTALLMETADATAKEY__PRIMARYCATEGORY AS "APP CATEGORY",
			ZSTRUCTUREDMETADATA .Z_DKAPPINSTALLMETADATAKEY__TITLE AS "APP NAME",
			 CASE ZOBJECT.ZSTARTDAYOFWEEK 
					WHEN "1" THEN "Sunday"
					WHEN "2" THEN "Monday"
					WHEN "3" THEN "Tuesday"
					WHEN "4" THEN "Wednesday"
					WHEN "5" THEN "Thursday"
					WHEN "6" THEN "Friday"
					WHEN "7" THEN "Saturday"
			 END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
			FROM
				ZOBJECT 
				LEFT JOIN
					ZSTRUCTUREDMETADATA 
					ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
				LEFT JOIN
					ZSOURCE 
					ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
			WHERE
				ZSTREAMNAME = "/app/install"
			''')
    else:
        cursor.execute('''
			SELECT
					DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
					DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
					ZOBJECT.ZVALUESTRING AS "BUNDLE ID",
					CASE ZOBJECT.ZSTARTDAYOFWEEK 
						WHEN "1" THEN "Sunday"
						WHEN "2" THEN "Monday"
						WHEN "3" THEN "Tuesday"
						WHEN "4" THEN "Wednesday"
						WHEN "5" THEN "Thursday"
						WHEN "6" THEN "Friday"
						WHEN "7" THEN "Saturday"
					END "DAY OF WEEK",
					ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
					DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",	
					ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
				FROM
				   ZOBJECT 
				   LEFT JOIN
				      ZSTRUCTUREDMETADATA 
				      ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
				   LEFT JOIN
				      ZSOURCE 
				      ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
				WHERE ZSTREAMNAME is "/app/install" 
					''')

    rows = cursor.fetchall()
    if rows:
        # Rows already match the selected column order (10 columns on
        # iOS 12+, 7 columns before).
        data_list = [tuple(row) for row in rows]
        if new_schema:
            data_headers = ('Start', 'End', 'Bundle ID', 'App Category',
                            'App Name', 'Day of Week', 'GMT Offset',
                            'Entry Creation', 'UUID', 'ZOBJECT Table ID')
        else:
            data_headers = ('Start', 'End', 'Bundle ID', 'Day of Week',
                            'GMT Offset', 'Entry Creation',
                            'ZOBJECT Table ID')

        report = ArtifactHtmlReport('KnowledgeC Installed Apps')
        report.start_artifact_report(report_folder, 'Installed Apps')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list,
                                         file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list,
            'KnowledgeC Installed Apps')
        timeline(report_folder, 'KnowledgeC Installed Apps', data_list,
                 data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Esempio n. 27
0
def get_teams(files_found, report_folder, seeker, wrap_text):
    """Parse Microsoft Teams artifacts from SkypeTeams.db.

    For every SkypeTeams.db in files_found, reports Messages, Users,
    Call Log, Activity Feed and File Info as HTML tables, TSV files and
    (where timestamped) timeline entries. Epoch-zero timestamps
    ('1970-01-01 00:00:00') mean "not set" and are blanked.

    Parameters:
        files_found: iterable of candidate file paths.
        report_folder: output folder for reports.
        seeker, wrap_text: unused here; kept for the artifact framework.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('SkypeTeams.db'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()

        # ----- Messages (joined to Conversation for the topic name) -----
        cursor.execute('''
        SELECT
        datetime("arrivalTime"/1000, 'unixepoch'),
        userDisplayName,
        content,
        displayName,
        datetime("deleteTime"/1000, 'unixepoch'),
        Message.conversationId,
        messageId
        FROM Message
        left join Conversation
        on Message.conversationId = Conversation.conversationId
        ORDER BY  Message.conversationId, arrivalTime
        ''')
        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('Teams Messages')
            report.start_artifact_report(report_folder, 'Teams Messages')
            report.add_script()
            data_headers = ('Timestamp', 'User Display Name', 'Content',
                            'Topic Name', 'Delete Time', 'Conversation ID',
                            'Message ID')
            data_list = []
            for row in all_rows:
                # Blank epoch-zero arrival/delete timestamps.
                arrival = '' if row[0] == '1970-01-01 00:00:00' else row[0]
                deleted = '' if row[4] == '1970-01-01 00:00:00' else row[4]
                data_list.append(
                    (arrival, row[1], row[2], row[3], deleted, row[5], row[6]))
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = 'Teams Messages'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Teams Messages'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Teams Messages data available')

        # ----- Users -----
        cursor.execute('''
        SELECT
        datetime("lastSyncTime"/1000, 'unixepoch'),
        givenName,
        surname,
        displayName,
        email,
        secondaryEmail,
        alternativeEmail,
        telephoneNumber,
        homeNumber,
        accountEnabled,
        type,
        userType,
        isSkypeTeamsUser,
        isPrivateChatEnabled
        from
        User
        ''')
        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('Teams Users')
            report.start_artifact_report(report_folder, 'Teams Users')
            report.add_script()
            data_headers = ('Last Sync', 'Given Name', 'Surname',
                            'Display Name', 'Email', 'Secondary Email',
                            'Alt. Email', 'Telephone Number', 'Home Number',
                            'Account Enabled?', 'Type', 'User Type',
                            'Is Teams User?', 'Is Private Chat Enabled?')
            data_list = [tuple(row) for row in all_rows]
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = 'Teams Users'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No Teams Users data available')

        # ----- Call Log (call metadata stored as JSON in
        #       MessagePropertyAttribute.attributeValue; callId/target are
        #       selected for the User join but not reported) -----
        cursor.execute('''
        SELECT
        datetime(json_extract(attributeValue, '$.connectTimeMillis') /1000, 'unixepoch') as connectTimeMillis,
        datetime(json_extract(attributeValue, '$.endTimeMillis') /1000, 'unixepoch') as endTimeMillis,
        json_extract(attributeValue, '$.callState') as callState,
        json_extract(attributeValue, '$.callType') as callType,
        json_extract(attributeValue, '$.originatorDisplayName') as originator,
        json_extract(attributeValue, '$.callDirection') as callDirection,
        User.givenName as targetparticipantName,
        json_extract(attributeValue, '$.sessionType') as sessionType,
        json_extract(attributeValue, '$.callId') as callId,
        json_extract(attributeValue, '$.target') as target
        from MessagePropertyAttribute, User
        where propertyId = 'CallLog' and target = User.mri
        ''')
        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('Teams Call Log')
            report.start_artifact_report(report_folder, 'Teams Call Log')
            report.add_script()
            data_headers = ('Connect Time', 'End Time', 'Call State',
                            'Call Type', 'Originator', 'Call Direction',
                            'Target Participant Name', 'Session Type')
            data_list = []
            for row in all_rows:
                # Blank epoch-zero connect/end timestamps.
                connect = '' if row[0] == '1970-01-01 00:00:00' else row[0]
                ended = '' if row[1] == '1970-01-01 00:00:00' else row[1]
                data_list.append((connect, ended, row[2], row[3], row[4],
                                  row[5], row[6], row[7]))
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = 'Teams Call Log'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No Teams Call Log data available')

        # ----- Activity Feed -----
        cursor.execute('''
        SELECT
        datetime("activityTimestamp"/1000, 'unixepoch') as activityTimestamp,
        sourceUserImDisplayName,
        messagePreview,
        activityType,
        activitySubtype,
        isRead
        FROM ActivityFeed
        ''')
        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('Teams Activity Feed')
            report.start_artifact_report(report_folder, 'Teams Activity Feed')
            report.add_script()
            data_headers = ('Timestamp', 'Display Name', 'Message Preview',
                            'Activity Type', 'Activity Subtype', 'Is read?')
            data_list = [tuple(row) for row in all_rows]
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = 'Teams Activity Feed'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Teams Activity Feed'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Teams Activity Feed data available')

        # ----- File Info -----
        cursor.execute('''
        SELECT
        lastModifiedTime,
        fileName,
        type,
        objectUrl,
        isFolder,
        lastModifiedBy
        FROM FileInfo
        ''')
        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('Teams File Info')
            report.start_artifact_report(report_folder, 'Teams File Info')
            report.add_script()
            data_headers = ('Timestamp', 'File Name', 'Type', 'Object URL',
                            'Is Folder?', 'Last Modified By')
            data_list = []
            for row in all_rows:
                # ISO 8601 'T' separator -> space; tolerate a NULL timestamp
                # (previously crashed with AttributeError on None).
                mtime = row[0].replace('T', ' ') if row[0] else row[0]
                data_list.append(
                    (mtime, row[1], row[2], row[3], row[4], row[5]))
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = 'Teams File Info'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Teams File Info'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Teams File Info data available')

        # Close each database as soon as its file is processed. The
        # original close after the loop released only the last connection
        # and raised NameError when no SkypeTeams.db was found at all.
        db.close()
Esempio n. 28
0
def _report_locationd_table(db, query, data_headers, artifact_name,
                            tab_name, tsv_name, file_found, report_folder):
    """Run one locationd query and emit its HTML report, TSV and timeline.

    Logs 'No data available for <artifact_name>' when the table is empty.
    Raises sqlite3.OperationalError if the table is absent, matching the
    original per-section behavior.
    """
    cursor = db.cursor()
    cursor.execute(query)
    all_rows = cursor.fetchall()
    if all_rows:
        data_list = [tuple(row) for row in all_rows]
        report = ArtifactHtmlReport(artifact_name)
        report.start_artifact_report(report_folder, tab_name, '')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        tsv(report_folder, data_headers, data_list, tsv_name)
        # NOTE: timeline() is called without data_headers throughout this
        # artifact; preserved from the original code.
        timeline(report_folder, tsv_name, data_list)
    else:
        logfunc('No data available for ' + artifact_name)


def get_locationDallB(files_found, report_folder, seeker):
    """Report locationd caches: APPHARVEST (pre-iOS 11 only) plus CDMA,
    cell, LTE and WiFi location tables from the first file in files_found.

    Timestamps are Apple-epoch seconds (offset 978307200 to Unix epoch).
    """
    file_found = str(files_found[0])
    # Single connection for all queries; the original connected twice,
    # leaking the first handle, and never closed the database.
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        # APPHARVEST is gone on iOS 11+; the remaining tables are still
        # queried regardless of version, as before.
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        _report_locationd_table(
            db,
            """
            SELECT
            DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
            BUNDLEID AS "BUNDLE ID",
            LATITUDE || ", " || LONGITUDE AS "COORDINATES",
            ALTITUDE AS "ALTITUDE",
            HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
            VERTICALACCURACY AS "VERTICAL ACCURACY",
            STATE AS "STATE",
            AGE AS "AGE",
            ROUTINEMODE AS "ROUTINE MODE",
            LOCATIONOFINTERESTTYPE AS "LOCATION OF INTEREST TYPE",
            HEX(SIG) AS "SIG (HEX)",
            LATITUDE AS "LATITUDE",
            LONGITUDE AS "LONGITUDE",
            SPEED AS "SPEED",
            COURSE AS "COURSE",
            CONFIDENCE AS "CONFIDENCE"
            FROM APPHARVEST
            """,
            ('Timestamp', 'Bundle ID', 'Coordinates', 'Altitude',
             'Horizontal Accuracy', 'Vertical Accuracy', 'State', 'Age',
             'Routine Mode', 'Location of Interest Type', 'Sig (HEX)',
             'Latitude', 'Longitude', 'Speed', 'Course', 'Confidence'),
            'LocationD App Harvest', 'App Harvest',
            'LocationD Cell App Harvest', file_found, report_folder)

    _report_locationd_table(
        db,
        """
        SELECT
        DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
        LATITUDE || ", " || LONGITUDE AS "COORDINATES",
        MCC AS "MCC",
        SID AS "SID",
        NID AS "NID",
        BSID AS "BSID",
        ZONEID AS "ZONEID",
        BANDCLASS AS "BANDCLASS",
        CHANNEL AS "CHANNEL",
        PNOFFSET AS "PNOFFSET",
        ALTITUDE AS "ALTITUDE",
        SPEED AS "SPEED",
        COURSE AS "COURSE",
        CONFIDENCE AS "CONFIDENCE",
        HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
        VERTICALACCURACY AS "VERTICAL ACCURACY",
        LATITUDE AS "LATITUDE",
        LONGITUDE AS "LONGITUDE"
        FROM CDMACELLLOCATION
        """,
        ('Timestamp', 'Coordinates', 'MCC', 'SID', 'NID', 'BSID', 'ZONEID',
         'BANDCLASS', 'Channel', 'PNOFFSET', 'Altitude', 'Speed', 'Course',
         'Confidence', 'Horizontal Accuracy', 'Vertical Accuracy',
         'Latitude', 'Longitude'),
        'LocationD CDMA Location', 'CDMA Location',
        'LocationD CDMA Location', file_found, report_folder)

    _report_locationd_table(
        db,
        """
        SELECT
        DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
        LATITUDE || ", " || LONGITUDE AS "COORDINATES",
        MCC AS "MCC",
        MNC AS "MNC",
        LAC AS "LAC",
        CI AS "CI",
        UARFCN AS "UARFCN",
        PSC AS "PSC",
        ALTITUDE AS "ALTITUDE",
        SPEED AS "SPEED",
        COURSE AS "COURSE",
        CONFIDENCE AS "CONFIDENCE",
        HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
        VERTICALACCURACY AS "VERTICAL ACCURACY",
        LATITUDE AS "LATITUDE",
        LONGITUDE AS "LONGITUDE"
        FROM CELLLOCATION
        """,
        ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'LAC', 'CI', 'UARFCN',
         'PSC', 'Altitude', 'Speed', 'Course', 'Confidence',
         'Horizontal Accuracy', 'Vertical Accuracy', 'Latitude',
         'Longitude'),
        'LocationD Cell Location', 'Cell Location',
        'LocationD Cell Location', file_found, report_folder)

    _report_locationd_table(
        db,
        """
        SELECT
        DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
        LATITUDE || ", " || LONGITUDE AS "COORDINATES",
        MCC AS "MCC",
        MNC AS "MNC",
        CI AS "CI",
        UARFCN AS "UARFCN",
        PID AS "PID",
        ALTITUDE AS "ALTITUDE",
        SPEED AS "SPEED",
        COURSE AS "COURSE",
        CONFIDENCE AS "CONFIDENCE",
        HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
        VERTICALACCURACY AS "VERTICAL ACCURACY",
        LATITUDE AS "LATITUDE",
        LONGITUDE AS "LONGITUDE"
        FROM LTECELLLOCATION
        """,
        ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'CI', 'UARFCN', 'PID',
         'Altitude', 'Speed', 'Course', 'Confidence',
         'Horizontal Accuracy', 'Vertical Accuracy', 'Latitude',
         'Longitude'),
        'LocationD LTE Location', 'LTE Location',
        'LocationD LTE Location', file_found, report_folder)

    _report_locationd_table(
        db,
        """
        SELECT
        DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
        LATITUDE || ", " || LONGITUDE AS "COORDINATES",
        MAC AS "MAC",
        CHANNEL AS "CHANNEL",
        INFOMASK AS "INFOMASK",
        SPEED AS "SPEED",
        COURSE AS "COURSE",
        CONFIDENCE AS "CONFIDENCE",
        SCORE AS "SCORE",
        REACH AS "REACH",
        HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
        VERTICALACCURACY AS "VERTICAL ACCURACY",
        LATITUDE AS "LATITUDE",
        LONGITUDE AS "LONGITUDE"
        FROM WIFILOCATION
        """,
        ('Timestamp', 'Coordinates', 'MAC', 'Channel', 'Infomask', 'Speed',
         'Course', 'Confidence', 'Score', 'Reach', 'Horizontal Accuracy',
         'Vertical Accuracy', 'Latitude', 'Longitude'),
        'LocationD WiFi Location', 'WiFi Location',
        'LocationD WiFi Location', file_found, report_folder)

    db.close()
Esempio n. 29
0
def get_notificationsDuet(files_found, report_folder, seeker):
    """Parse Duet/SEGB notification stream files into a report per file.

    Each SEGB record is walked manually: a little-endian int32 record
    size, an 8-byte little-endian double timestamp (Apple epoch), a GUID,
    then optional length-prefixed title / subtitle / body / bundle-id /
    optional-text / Apple-ID fields, and a trailing 8-byte timestamp.
    Records are 8-byte aligned within the stream.
    """
    for file_found in files_found:
        file_found = str(file_found)
        filename = os.path.basename(file_found)
        if filename.startswith('.'):
            continue
        # Only regular, non-tombstone files are parsed.
        if not os.path.isfile(file_found) or 'tombstone' in file_found:
            continue

        # Per-record fields default to '' so absent optional fields stay
        # blank in the output row.
        convertedtime1 = guid = title = subtitle = bundledata = bodyread = bundleidread = optionaltextread = bundleid2read = optionalgmarkeread = appleidread = convertedtime2 = ''

        with open(file_found, 'rb') as file:
            data = file.read()

        data_list = []
        headerloc = data.index(b'SEGB')

        ab = BytesIO(data)
        ab.seek(headerloc)
        ab.read(4)  # 'SEGB' magic

        while True:
            # Record size prefix; zero terminates the stream.
            sizeofnotificatoninhex = ab.read(4)
            sizeofnotificaton = struct.unpack_from("<i", sizeofnotificatoninhex)[0]
            if sizeofnotificaton == 0:
                break
            notificatonmessage = ab.read(sizeofnotificaton + 28)

            mensaje = BytesIO(notificatonmessage)
            mensaje.read(4)  # notification header

            date1 = mensaje.read(8)  # little-endian double, Apple epoch
            date1 = struct.unpack_from("<d", date1)[0]
            convertedtime1 = timestampsconv(date1)

            # TODO(review): a second timestamp appears to live inside
            # these skipped bytes; revisit to extract it.
            mensaje.read(27)

            test = mensaje.read(1)
            if test == b'\x12':
                mensaje.read(1)  # byte length of guid
            else:
                mensaje.read(2)  # indicator byte + length byte, both fixed

            guid = mensaje.read(36)
            guid = guid.decode('latin-1')

            checktitle = mensaje.read(1)  # 0x1a marks a title field
            if checktitle == b'\x1a':
                titlelength = mensaje.read(1)
                if titlelength >= b'\x80':
                    mensaje.read(1)  # extra length byte for long titles
                lengthtoread = int(titlelength.hex(), 16)
                title = mensaje.read(lengthtoread)
                title = title.decode('latin-1')
                title = utf8_in_extended_ascii(title)[1]
                checksubtitle = mensaje.read(1)
            else:
                checksubtitle = checktitle
            if checksubtitle == b'\x22':  # 0x22 marks a subtitle field
                checksubtitlelength = mensaje.read(1)
                if checksubtitlelength >= b'\x80':
                    mensaje.read(1)
                subtitlelengthtoread = int(checksubtitlelength.hex(), 16)
                subtitle = mensaje.read(subtitlelengthtoread)
                subtitle = subtitle.decode('latin-1')
                subtitle = utf8_in_extended_ascii(subtitle)[1]
                # Consumed and immediately overwritten below — preserved
                # so the stream position stays identical to the original.
                bodylenght = mensaje.read(1)

            bodylenght = mensaje.read(1)

            if b'\x80' <= bodylenght <= b'\x8F':
                # Two-byte varint-style length: low nibbles weighted 16/1,
                # scaled by the following factor byte.
                factor = mensaje.read(1)
                factor = int(factor.hex(), 16)
                if factor == 1:
                    bodytoread = int(bodylenght.hex(), 16)
                    bodytoread = bodytoread * factor
                else:
                    byted = bodylenght.hex()
                    values = []
                    for byte in byted:
                        byte = int(byte, 16)
                        high, low = byte >> 4, byte & 0x0F
                        values.append(low)
                    firstorder = values[0] * 16
                    secondorder = values[1] * 1
                    basenumber = firstorder * factor
                    sweep = basenumber + secondorder
                    bodytoread = sweep
            elif bodylenght > b'\x8F':
                # Fall back to scanning for the 0x30 0x00 0x42 terminator.
                offsetbody = mensaje.tell()
                abcd = notificatonmessage.index(b'\x30\x00\x42')
                bodytoread = abcd - offsetbody
            else:
                bodytoread = int(bodylenght.hex(), 16)

            if bodylenght == b'\x00':
                # No body: next bytes are a marker byte then the bundle-id
                # length.
                bundlelen = mensaje.read(1)
                bundlelen = mensaje.read(1)
                bundlelen = int(bundlelen.hex(), 16)
                bundledata = mensaje.read(bundlelen)
                bundledata = bundledata.decode('latin-1')
                bundledata = utf8_in_extended_ascii(bundledata)[1]
                optionaltextcheck = mensaje.read(3)
            else:
                bodyread = mensaje.read(bodytoread)
                bodyread = bodyread.decode('latin-1')
                bodyread = utf8_in_extended_ascii(bodyread)[1]

                bundleheader = mensaje.read(3)

                bundleidtoread = mensaje.read(1)
                bundleidtoread = int(bundleidtoread.hex(), 16)
                bundleidread = mensaje.read(bundleidtoread)
                bundleidread = bundleidread.decode('latin-1')
                bundleidread = utf8_in_extended_ascii(bundleidread)[1]

                optionaltextcheck = mensaje.read(3)

            bundle2lenght = ''
            if optionaltextcheck == b'\x4A\x00\x52':
                optionaltextlength = mensaje.read(1)
                optionaltextlength = int(optionaltextlength.hex(), 16)
                optionaltextread = mensaje.read(optionaltextlength)
                optionaltextread = optionaltextread.decode('latin-1')
                # FIX: the converted text was previously assigned to a
                # misspelled name (boptionaltextread), discarding the
                # UTF-8 repair applied to every other string field.
                optionaltextread = utf8_in_extended_ascii(optionaltextread)[1]
                bundle2header = mensaje.read(1)  # always 0x62
                bundle2lenght = mensaje.read(1)

            if optionaltextcheck == b'\x4A\x00\x62':
                bundle2lenght = mensaje.read(1)

            if bundle2lenght != '':
                bundle2lenght = int(bundle2lenght.hex(), 16)
                bundleid2read = mensaje.read(bundle2lenght)
                bundleid2read = bundleid2read.decode('latin-1')
                bundleid2read = utf8_in_extended_ascii(bundleid2read)[1]

            optionalgmarker = mensaje.read(1)  # 0x6A marks an optional GUID
            if optionalgmarker == b'\x6A':
                optionalgmarkerlen = mensaje.read(1)
                optionalgmarkerlen = int(optionalgmarkerlen.hex(), 16)
                optionalgmarkeread = mensaje.read(optionalgmarkerlen)
                optionalgmarkeread = optionalgmarkeread.decode('latin-1')
                optionalgmarkeread = utf8_in_extended_ascii(optionalgmarkeread)[1]
                mensaje.read(20)
            else:
                mensaje.read(19)

            checkappleid = mensaje.read(2)
            if checkappleid == b'\xA2\x01':
                appleidread = ''
                while True:
                    appleidlen = mensaje.read(1)
                    appleidlen = int(appleidlen.hex(), 16)
                    appleidread = mensaje.read(appleidlen)
                    appleidread = appleidread.decode('latin-1')
                    appleidread = utf8_in_extended_ascii(appleidread)[1]
                    # NOTE(review): this doubles the last ID read ("x x")
                    # instead of accumulating successive IDs — preserved
                    # as-is; confirm intended behavior before changing.
                    appleidread = appleidread + ' ' + appleidread
                    innercheck = mensaje.read(2)
                    if innercheck == b'\xA2\x01':
                        pass
                    else:
                        mensaje.read(1)
                        break
            else:
                mensaje.read(3)

            lastdate = mensaje.read(8)
            date2 = struct.unpack_from("<d", lastdate)[0]
            convertedtime2 = timestampsconv(date2)
            data_list.append((convertedtime1, guid, title, subtitle, bundledata, bodyread, bundleidread, optionaltextread, bundleid2read, optionalgmarkeread, appleidread, convertedtime2, filename))

            # Reset so fields absent from the next record stay blank.
            convertedtime1 = guid = title = subtitle = bundledata = bodyread = bundleidread = optionaltextread = bundleid2read = optionalgmarkeread = appleidread = convertedtime2 = ''

            # Records are 8-byte aligned; skip the padding bytes.
            modresult = sizeofnotificaton % 8
            if modresult != 0:
                ab.read(8 - modresult)

        if len(data_list) > 0:
            description = ''
            report = ArtifactHtmlReport('Notifications Duet')
            report.start_artifact_report(report_folder, 'Notifications Duet - (unknown)', description)
            report.add_script()
            data_headers = ('Timestamp', 'GUID', 'Title', 'Subtitle', 'Bundle Data', 'Body', 'Bundle ID', 'Optional Text', 'Bundle ID', 'Optional GUID', 'Apple ID', 'Timestamp', 'Filename')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Notifications Duet - (unknown)'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available for Notifications Duet')
Esempio n. 30
0
def process_siminfo(folder, uid, report_folder):
    """Report SIM card details from a siminfo database for one user id.

    Tries the current schema first and falls back to an older one that
    lacks the imsi, iso_country_code and carrier_id columns. Emits an
    HTML table and a TSV, or logs that no data was available.

    Parameters:
        folder: path to the siminfo SQLite database.
        uid: user/profile id used only to label the report and TSV.
        report_folder: output folder for reports.
    """
    db = sqlite3.connect(folder)
    cursor = db.cursor()

    try:
        # Current schema with dedicated imsi / carrier_id columns.
        cursor.execute('''
        SELECT
            number,
            imsi,
            display_name,
            carrier_name,
            iso_country_code,
            carrier_id,
            icc_id
        FROM
            siminfo
        ''')
    except sqlite3.OperationalError:
        # Older schema: imsi/iso_country_code/carrier_id don't exist.
        # carrier_name is repeated so column positions still line up; the
        # duplicates are blanked below. (Was a bare `except:`, which also
        # swallowed KeyboardInterrupt/SystemExit.)
        cursor.execute('''
        SELECT
            number,
            card_id,
            display_name,
            carrier_name,
            carrier_name,
            carrier_name,
            icc_id
        FROM
            siminfo
        ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Device Info')
        report.start_artifact_report(report_folder, f'SIM_info_{uid}')
        report.add_script()
        data_headers = ('Number', 'IMSI', 'Display Name', 'Carrier Name',
                        'ISO Code', 'Carrier ID', 'ICC ID')

        data_list = []
        for row in all_rows:
            # Equal columns 3/4 indicate the padded legacy query ran:
            # blank the padded fields and the card_id standing in for
            # IMSI. (A coincidental match on the new schema would also be
            # blanked — quirk preserved from the original.)
            if row[3] == row[4]:
                row1 = row4 = row5 = ''
            else:
                row1, row4, row5 = row[1], row[4], row[5]
            data_list.append(
                (row[0], row1, row[2], row[3], row4, row5, row[6]))
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()

        tsvname = f'sim info {uid}'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc(f'No SIM_Info{uid} data available')
    db.close()