Example 1
def get_locationDallB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		bundleid,
		altitude,
		horizontalaccuracy,
		verticalaccuracy,
		state,
		age,
		routinemode,
		locationofinteresttype,
		latitude,
		longitude,
		speed,
		course,
		confidence
		from appharvest
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = ''
            report = ArtifactHtmlReport('LocationD App Harvest')
            report.start_artifact_report(report_folder, 'App Harvest',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Bundle ID', 'Altitude',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'State', 'Age', 'Routine Mode',
                            'Location of Interest Type', 'Latitude',
                            'Longitude', 'Speed', 'Course', 'Confidence')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell App Harvest'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell App Harvest'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell App Harvest'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD App Harvest')

    if does_table_exist(db, "cdmacelllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		sid,
		nid,
		bsid,
		zoneid,
		bandclass,
		channel,
		pnoffset,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from cdmacelllocation
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16]))

            description = ''
            report = ArtifactHtmlReport('LocationD CDMA Location')
            report.start_artifact_report(report_folder, 'CDMA Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'SID', 'NID', 'BSID',
                            'Zone ID', 'Band Class', 'Channel', 'PN Offset',
                            'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD CDMA Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD CDMA Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD CDMA Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD CDMA Location')

    if does_table_exist(db, "celllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		mnc,
		lac,
		ci,
		uarfcn,
		psc,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from celllocation
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13], row[14]))

            description = ''
            report = ArtifactHtmlReport('LocationD Cell Location')
            report.start_artifact_report(report_folder, 'Cell Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'MNC', 'LAC', 'CI', 'UARFCN',
                            'PSC', 'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD Cell Location')

    if does_table_exist(db, "ltecelllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select 
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		mnc,
		ci,
		uarfcn,
		pid,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from ltecelllocation	
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = ''
            report = ArtifactHtmlReport('LocationD LTE Location')
            report.start_artifact_report(report_folder, 'LTE Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'MNC', 'CI', 'UARFCN', 'PID',
                            'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD LTE Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD LTE Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD LTE Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD LTE Location')

    if does_table_exist(db, "wifilocation"):
        cursor = db.cursor()
        cursor.execute("""
        select
        datetime(timestamp + 978307200,'unixepoch'),
        mac,
        channel,
        infomask,
        speed,
        course,
        confidence,
        score,
        reach,
        horizontalaccuracy,
        verticalaccuracy,
        latitude,
        longitude
        from wifilocation
        """)

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12]))

            description = ''
            report = ArtifactHtmlReport('LocationD WiFi Location')
            report.start_artifact_report(report_folder, 'WiFi Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MAC', 'Channel', 'Infomask', 'Speed',
                            'Course', 'Confidence', 'Score', 'Reach',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD WiFi Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD WiFi Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD WiFi Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD WiFi Location')

    db.close()
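Both open_sqlite_db_readonly and does_table_exist above come from the framework's shared helper module rather than the standard library. As a rough sketch (an assumption, not the framework's actual implementation), a table-existence guard like the one used for cdmacelllocation can be written against SQLite's schema catalog:

import sqlite3

def does_table_exist_sketch(db, table_name):
    # Look the table up in sqlite_master; returns True only for real tables.
    cursor = db.execute(
        "select name from sqlite_master where type = 'table' and name = ?",
        (table_name,))
    return cursor.fetchone() is not None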
Example 2
def get_knowCbluetooth(files_found, report_folder, seeker):
	iOSversion = scripts.artifacts.artGlobals.versionf
	if version.parse(iOSversion) < version.parse("11"):
		logfunc("Unsupported version for KnowledgeC Bluetooth on iOS " + iOSversion)
		return

	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	cursor = db.cursor()

	cursor.execute(
	"""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
		DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZSTRUCTUREDMETADATA.Z_DKBLUETOOTHMETADATAKEY__ADDRESS AS "BLUETOOTH ADDRESS", 
		ZSTRUCTUREDMETADATA.Z_DKBLUETOOTHMETADATAKEY__NAME AS "BLUETOOTH NAME",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",  
		CASE ZOBJECT.ZSTARTDAYOFWEEK 
			WHEN "1" THEN "Sunday"
			WHEN "2" THEN "Monday"
			WHEN "3" THEN "Tuesday"
			WHEN "4" THEN "Wednesday"
			WHEN "5" THEN "Thursday"
			WHEN "6" THEN "Friday"
			WHEN "7" THEN "Saturday"
		END "DAY OF WEEK",
		ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
		DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
		ZOBJECT.ZUUID AS "UUID", 
		ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
	FROM
		ZOBJECT 
		LEFT JOIN
			ZSTRUCTUREDMETADATA 
			ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
		LEFT JOIN
			ZSOURCE 
			ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
	WHERE
		ZSTREAMNAME = "/bluetooth/isConnected"
	"""
	)

	all_rows = cursor.fetchall()
	usageentries = len(all_rows)
	if usageentries > 0:
		data_list = []    
		for row in all_rows:
			data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))

		description = ''
		report = ArtifactHtmlReport('KnowledgeC Bluetooth Connections')
		report.start_artifact_report(report_folder, 'Bluetooth Connections', description)
		report.add_script()
		data_headers = ('Start','End','Bluetooth Address','Bluetooth Name','Usage in Seconds','Usage in Minutes','Day of Week','GMT Offset','Entry Creation','UUID','Zobject Table ID')     
		report.write_artifact_data_table(data_headers, data_list, file_found)
		report.end_artifact_report()
		
		tsvname = 'KnowledgeC Bluetooth'
		tsv(report_folder, data_headers, data_list, tsvname)
		
		tlactivity = 'KnowledgeC Bluetooth'
		timeline(report_folder, tlactivity, data_list, data_headers)
	else:
		logfunc('No data available for KnowledgeC Bluetooth Connections')

	db.close()
	return      
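The recurring + 978307200 in these queries converts Apple's Core Data epoch (seconds since 2001-01-01 00:00:00 UTC) to the Unix epoch that SQLite's 'unixepoch' modifier expects. The same arithmetic in plain Python, for cross-checking a raw ZSTARTDATE value:

from datetime import datetime, timezone

COCOA_EPOCH_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 UTC

def cocoa_to_datetime(cocoa_seconds):
    # Equivalent to DATETIME(x + 978307200, 'UNIXEPOCH') in the queries above.
    return datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET,
                                  tz=timezone.utc)

# 0 in Core Data time is 2001-01-01 00:00:00 UTC.
assert cocoa_to_datetime(0).isoformat() == '2001-01-01T00:00:00+00:00'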
	
Example 3
def get_googleTasks(files_found, report_folder, seeker, wrap_text):
    for file_found in files_found:
        file_found = str(file_found)

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()

        cursor.execute('''
            SELECT
            TaskId, TaskListId, TaskRecurrenceId, EffectiveTask, Completed, HasDirtyState, DueDate
            FROM
            Tasks;
        ''')

        all_rows = cursor.fetchall()
        all_new_rows = []

        for row in all_rows:
            if row[4] == 0:
                task, task_details, created, completed, modified, timezone = protobuf_parse_not_completed(
                    row[3])
            elif row[4] == 1:
                task, task_details, created, completed, modified, timezone = protobuf_parse_completed(
                    row[3])
            else:
                continue  # unexpected Completed value; skip the row
            new_data = (created, modified, completed, row[6], timezone, row[0],
                        row[1], row[2], task, task_details,
                        'True' if row[4] == 1 else 'False',
                        'True' if row[5] == 1 else 'False')
            all_new_rows.append(new_data)

        usageentries = len(all_new_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Google Tasks')
            report.start_artifact_report(report_folder, "Google Tasks")
            report.add_script()

            data_headers = ('Created Time', 'Last Modified Time',
                            'Completed Time', 'Task Due Date', 'Time Zone',
                            'Task ID', 'Task List ID', 'Task Recurrence Id',
                            'Task Name', 'Task Details', 'Completed',
                            'Has Dirty State')

            data_list = all_new_rows

            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = "Google Tasks"
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = "Google Tasks"
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Google Tasks found')

        db.close()
    return
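open_sqlite_db_readonly is another shared helper; opening evidence databases read-only keeps the parser from accidentally modifying them. A minimal sketch of the idea using sqlite3's URI mode (the real helper may differ, e.g. by copying the file first):

import sqlite3

def open_sqlite_db_readonly_sketch(path):
    # mode=ro refuses writes and will not create the file if it is missing.
    # Paths containing '?' or '#' would need percent-encoding first.
    return sqlite3.connect(f'file:{path}?mode=ro', uri=True)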
Example 4
def get_interactionCcontacts(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("10"):
        logfunc("Unsupported version for InteractionC Contacts on iOS " + iOSversion)
        db.close()
        return

    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(zinteractions.zstartdate + 978307200, 'unixepoch'),
    datetime(zinteractions.zenddate + 978307200, 'unixepoch'),
    zinteractions.zbundleid,
    zcontacts.zdisplayname,
    zcontacts.zidentifier,
    zinteractions.zdirection,
    zinteractions.zisresponse,
    zinteractions.zrecipientcount,
    datetime(zinteractions.zcreationdate + 978307200, 'unixepoch'),
    datetime(zcontacts.zcreationdate + 978307200, 'unixepoch'),
    zinteractions.zcontenturl
    from
    zinteractions
    left join
    zcontacts
    on zinteractions.zsender = zcontacts.z_pk
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10]))

        report = ArtifactHtmlReport('InteractionC')
        report.start_artifact_report(report_folder, 'Contacts')
        report.add_script()
        data_headers = ('Start Date','End Date','Bundle ID','Display Name','Identifier','Direction','Is Response','Recipient Count','Zinteractions Creation Date','Zcontacts Creation Date','Content URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'InteractionC Contacts'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'InteractionC Contacts'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in InteractionC Contacts')
        
    cursor = db.cursor()
    cursor.execute('''
    select
        datetime(zinteractions.ZCREATIONDATE + 978307200, 'unixepoch'),
        ZINTERACTIONS.zbundleid,
        ZINTERACTIONS.ztargetbundleid,
        ZINTERACTIONS.zuuid,
        ZATTACHMENT.zcontenturl
        from zinteractions
        inner join z_1interactions
        on zinteractions.z_pk = z_1interactions.z_3interactions
        inner join zattachment on z_1interactions.z_1attachments = zattachment.z_pk
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4]))

        report = ArtifactHtmlReport('InteractionC')
        report.start_artifact_report(report_folder, 'Attachments')
        report.add_script()
        data_headers = ('Creation Date', 'Bundle ID', 'Target Bundle ID', 'ZUUID', 'Content URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'InteractionC Attachments'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'InteractionC Attachments'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in InteractionC Attachments')
    

    db.close()
    return      
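tsv() and timeline() are shared report writers used by every artifact above. For orientation, a tsv() lookalike is little more than the csv module with a tab delimiter; the file name and layout here are assumptions based on how the call sites use it:

import csv
import os

def tsv_sketch(report_folder, data_headers, data_list, tsvname):
    # One tab-separated file per artifact: header row first, then one row per record.
    out_path = os.path.join(report_folder, tsvname + '.tsv')
    with open(out_path, 'w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f, delimiter='\t')
        writer.writerow(data_headers)
        writer.writerows(data_list)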
    
    
    
Example 5
def read_mms_messages(db, report_folder, file_found, seeker):

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)

    cursor = db.cursor()
    cursor.execute(mms_query)
    all_rows = cursor.fetchall()
    entries = len(all_rows)
    if entries > 0:
        report = ArtifactHtmlReport('MMS messages')
        report.start_artifact_report(report_folder, 'MMS messages')
        report.add_script()
        data_headers = ('Date', 'MSG ID', 'Thread ID', 'Date sent', 'Read',
                        'From', 'To', 'Cc', 'Bcc', 'Body')
        data_list = []

        last_id = 0
        temp_mms_list = []
        for row in all_rows:
            id = row['mms_id']
            if id != last_id:  # Start of new message, write out old message in temp buffer
                add_mms_to_data_list(data_list, temp_mms_list, folder_name)
                # finished writing
                last_id = id
                temp_mms_list = []

            msg = MmsMessage(row['date'], row['mms_id'], row['thread_id'],
                             row['date_sent'], row['read'], row['FROM'],
                             row['TO'], row['CC'], row['BCC'], row['msg_box'],
                             row['part_id'], row['seq'], row['ct'], row['cl'],
                             row['_data'], row['text'])
            temp_mms_list.append(msg)

            data_file_path = row['_data']
            if data_file_path is None:  # Has text, no file
                msg.body = row['text']
            else:
                # Get file from path
                if data_file_path[0] == '/':
                    temp_path = data_file_path[1:]
                else:
                    temp_path = data_file_path

                path_parts = temp_path.split('/')
                # This next routine reduces /data/xx/yy/img.jpg to /xx/yy/img.jpg removing the
                # first folder in the path, so that if our root (starting point) is inside
                # that folder, it will still find the file
                if len(path_parts) > 2:
                    path_parts.pop(0)
                    temp_path = '/'.join(path_parts)

                if is_windows:
                    temp_path = temp_path.replace('/', '\\')
                data_file_path_regex = f'**{slash}' + temp_path

                files_found = seeker.search(data_file_path_regex)
                if files_found:
                    data_file_real_path = str(files_found[0])
                    shutil.copy2(data_file_real_path, report_folder)
                    data_file_name = os.path.basename(data_file_real_path)
                    msg.filename = data_file_name
                else:
                    logfunc(f'File not found: {data_file_path}')
        # add last msg to list
        add_mms_to_data_list(data_list, temp_mms_list, folder_name)

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = 'MMS Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'MMS Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No MMS messages found!')
        return False
    return True
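MmsMessage is constructed with sixteen positional fields that mirror the query columns, and body/filename are assigned afterwards, so it is presumably a small record class. (The row['mms_id'] style of access also assumes the connection was configured with db.row_factory = sqlite3.Row.) A hypothetical shape for the record:

from dataclasses import dataclass
from typing import Optional

@dataclass
class MmsMessage:
    # Field names follow the constructor call above; this is an assumed
    # shape, not the project's actual definition.
    date: str
    mms_id: int
    thread_id: int
    date_sent: str
    read: int
    from_addr: Optional[str]
    to_addr: Optional[str]
    cc: Optional[str]
    bcc: Optional[str]
    msg_box: int
    part_id: int
    seq: int
    ct: Optional[str]         # MIME type of the part
    cl: Optional[str]         # content location
    data_path: Optional[str]  # on-disk path for file parts
    text: Optional[str]
    body: Optional[str] = None      # filled in later for text parts
    filename: Optional[str] = None  # filled in later for copied attachments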
Example 6
def get_battery_usage_v4(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(timestamp/1000,'unixepoch'),
    appLabel,
    packageName,
    case isHidden
        when 0 then ''
        when 1 then 'Yes'
    end,
    datetime((timestamp-bootTimestamp)/1000,'unixepoch'),
    zoneId,
    totalPower,
    consumePower,
    percentOfTotal,
    foregroundUsageTimeInMs*.001 as 'Foreground Usage (Seconds)',
    backgroundUsageTimeInMs*.001 as 'Background Usage (Seconds)',
    batteryLevel,
    case BatteryStatus
        when 2 then 'Charging'
        when 3 then 'Not Charging'
        when 5 then 'Fully Charged'
    end,
    batteryHealth
    from BatteryState
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        description = 'Battery usage details pulled from Settings Services.'
        report = ArtifactHtmlReport('Settings Services - Battery Usage')
        report.start_artifact_report(report_folder,
                                     'Settings Services - Battery Usage',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Application', 'Package Name', 'Hidden',
                        'Boot Timestamp', 'Timezone', 'Total Power',
                        'Consumed Power', '% Of Consumed',
                        'Foreground Usage (Seconds)',
                        'Background Usage (Seconds)', 'Battery Level (%)',
                        'Battery Status', 'Battery Health')
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Settings Services - Battery Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Settings Services - Battery Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Settings Services - Battery Usage data available')

    db.close()
    return
Example 7
def get_applicationSnapshots(files_found, report_folder, seeker):

    slash = '\\' if is_platform_windows() else '/'
    data_headers = ('App Name', 'Source Path', 'Date Modified', 'Snapshot')
    data_list = [
    ]  # Format=  [ [ 'App Name', 'ktx_path', mod_date, 'png_path' ], .. ]

    for file_found in files_found:
        file_found = str(file_found)
        if os.path.isdir(file_found):
            continue
        if file_found.lower().endswith('.ktx'):
            if os.path.getsize(file_found) < 2500:  # too small, they are blank
                continue
            parts = file_found.split(slash)
            if parts[-2] != 'downscaled':
                app_name = parts[-2].split(' ')[0]
            else:
                app_name = parts[-3].split(' ')[0]

            png_path = os.path.join(report_folder,
                                    app_name + '_' + parts[-1][:-4] + '.png')
            if save_ktx_to_png_if_valid(file_found, png_path):
                last_modified_date = datetime.datetime.fromtimestamp(
                    os.path.getmtime(file_found))
                data_list.append(
                    [app_name, file_found, last_modified_date, png_path])

        elif file_found.lower().endswith('.jpeg'):
            parts = file_found.split(slash)
            if parts[-2] != 'downscaled':
                app_name = parts[-2].split(' ')[0]
            else:
                app_name = parts[-3].split(' ')[0]
            if app_name.startswith('sceneID'):
                app_name = app_name[8:]
            #if app_name.endswith('-default'):
            #    app_name = app_name[:-8]
            dash_pos = app_name.find('-')
            if dash_pos > 0:
                app_name = app_name[0:dash_pos]

            jpg_path = os.path.join(report_folder, app_name + '_' + parts[-1])
            if shutil.copy2(file_found, jpg_path):
                last_modified_date = datetime.datetime.fromtimestamp(
                    os.path.getmtime(file_found))
                data_list.append(
                    [app_name, file_found, last_modified_date, jpg_path])

    if len(data_list):
        description = "Snapshots saved by iOS for individual apps appear here. Blank screenshots are excluded. Dates and times shown are from file modified timestamps."
        report = ArtifactHtmlReport('App Snapshots (screenshots)')
        report.start_artifact_report(report_folder, 'App Snapshots',
                                     description)
        report.add_script()
        report_folder_name = os.path.basename(report_folder.rstrip(slash))
        data_list_for_report = []
        for app_name, ktx_path, mod_date, png_path in data_list:
            dir_path, base_name = os.path.split(png_path)
            img_html = '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid" style="max-height:300px; max-width:400px"></a>'.format(
                quote(base_name), quote(report_folder_name))
            data_list_for_report.append(
                (escape(app_name), escape(ktx_path), mod_date, img_html))
        report.write_artifact_data_table(data_headers,
                                         data_list_for_report,
                                         '',
                                         html_escape=False,
                                         write_location=False)
        report.end_artifact_report()

        tsvname = 'App Snapshots'
        tsv_headers = ('App Name', 'Source Path', 'Date Modified')
        tsv(report_folder, tsv_headers, data_list, tsvname)
    else:
        logfunc('No snapshots available')
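save_ktx_to_png_if_valid converts iOS .ktx snapshot textures to PNG and reports whether the input was usable. Its internals are not shown here; as a sketch of the "if valid" half, a KTX 1.0 file can be recognized by its fixed 12-byte identifier before any decoding is attempted (the actual conversion needs a texture decoder):

KTX1_MAGIC = b'\xabKTX 11\xbb\r\n\x1a\n'  # KTX 1.0 file identifier, 12 bytes

def looks_like_ktx(path):
    # Cheap pre-check before handing the file to a real KTX-to-PNG decoder.
    with open(path, 'rb') as f:
        return f.read(12) == KTX1_MAGIC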
Example 8
def get_powerlogLightplug(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("10"):
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/powerlog_lightnining_connector_status.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
        DATETIME(LIGHTNINGCONNECTOR_TIMESTAMP + SYSTEM, 'UNIXEPOCH','LOCALTIME') AS ADJUSTED_TIMESTAMP,
        CASE IOACCESSORYPOWERMODE 
            WHEN "1" THEN "UNPLUGGED" 
            WHEN "3" THEN "PLUGGED IN" 
        END  AS "IO ACCESSORY POWER MODE",
        DATETIME(LIGHTNINGCONNECTOR_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_LIGHTNINGCONNECTOR_TIMESTAMP,
        DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
        SYSTEM AS TIME_OFFSET,
        LIGHTNINGCONNECTOR_ID AS "PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS TABLE ID" 
    	FROM
        (
        SELECT
            LIGHTNINGCONNECTOR_ID,
            LIGHTNINGCONNECTOR_TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            IOACCESSORYPOWERMODE,
            SYSTEM
        FROM
            (
            SELECT
                PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS.TIMESTAMP AS LIGHTNINGCONNECTOR_TIMESTAMP,
                IOACCESSORYPOWERMODE,
                PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS.ID AS "LIGHTNINGCONNECTOR_ID" ,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
            FROM
                PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS
            LEFT JOIN
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
            )
            AS LIGHTNINGCONNECTOR_STATE 
        GROUP BY
            LIGHTNINGCONNECTOR_ID 
        )
        ''')
        
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:    
                    data_list.append((row[0],row[1],row[2],row[3],row[4],row[5]))

                report = ArtifactHtmlReport('Powerlog Lightning Connector Status')
                report.start_artifact_report(report_folder, 'Lightning Connector Status')
                report.add_script()
                data_headers = ('Adjusted Timestamp','Accessory Power Mode','Original Lightning Connector Timestamp','Offset Timestamp','Time Offset','Table ID')
                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()
                
                tsvname = 'Powerlog Lightning Connector'
                tsv(report_folder, data_headers, data_list, tsvname)
                
                tlactivity = 'Powerlog Lightning Connector'
                timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No data available in Powerlog Lightning Connector Status')

        db.close()
        return      
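The nested APOLLO query pairs each connector event with the most recent PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET row (via MAX(TIME_OFFSET_ID)) and then adds that SYSTEM clock delta to the raw timestamp. The adjustment itself is plain arithmetic; roughly, in Python:

from datetime import datetime, timezone

def adjusted_timestamp(event_unix_seconds, system_offset_seconds):
    # Mirrors DATETIME(LIGHTNINGCONNECTOR_TIMESTAMP + SYSTEM, 'UNIXEPOCH'):
    # Powerlog records a system clock offset that must be added back to the
    # event's stored Unix time to recover the adjusted wall-clock time.
    return datetime.fromtimestamp(event_unix_seconds + system_offset_seconds,
                                  tz=timezone.utc)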
Example 9
def get_knowClocation(files_found, report_folder, seeker):
	iOSversion = scripts.artifacts.artGlobals.versionf
	if version.parse(iOSversion) < version.parse("12"):
		logfunc("Unsupported version for KnowledgeC Location on iOS " + iOSversion)
		return

	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	cursor = db.cursor()

	cursor.execute(
	"""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
      	DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LATITUDE || ", " || ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LONGITUDE AS "COORDINATES",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LOCATIONNAME AS "NAME",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__DISPLAYNAME AS "DISPLAY NAME",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__FULLYFORMATTEDADDRESS AS "FORMATTED ADDRESS",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__CITY AS "CITY",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__STATEORPROVINCE AS "STATE/PROVINCE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__COUNTRY AS "COUNTRY",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__POSTALCODE_V2 AS "POSTAL CODE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__SUBTHOROUGHFARE AS "SUBTHOROUGHFARE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__THOROUGHFARE AS "THOROUGHFARE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__PHONENUMBERS AS "PHONE NUMBERS",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__URL AS "URL",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__CONTENTDESCRIPTION AS "CONTENT DESCRIPTION",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYREQUIREDSTRING AS "USER ACTIVITY REQUIRED STRING",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LATITUDE AS "LATITUDE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LONGITUDE AS "LONGITUDE",
		ZSOURCE.ZSOURCEID AS "SOURCE ID",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYUUID AS "ACTIVITY UUID",
		ZSOURCE.ZITEMID AS "ITEM ID",
		ZSOURCE.ZSOURCEID AS "SOURCE ID",
      CASE ZOBJECT.ZSTARTDAYOFWEEK 
         WHEN "1" THEN "Sunday"
         WHEN "2" THEN "Monday"
         WHEN "3" THEN "Tuesday"
         WHEN "4" THEN "Wednesday"
         WHEN "5" THEN "Thursday"
         WHEN "6" THEN "Friday"
         WHEN "7" THEN "Saturday"
      END "DAY OF WEEK",
      ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
      DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
      ZOBJECT.ZUUID AS "UUID",
      ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
   FROM
      ZOBJECT 
      LEFT JOIN
         ZSTRUCTUREDMETADATA 
         ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
      LEFT JOIN
         ZSOURCE 
         ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
   WHERE
      ZSTREAMNAME = "/app/locationActivity" 
	"""
	)

	all_rows = cursor.fetchall()
	usageentries = len(all_rows)
	if usageentries > 0:
		data_list = []    
		for row in all_rows:
			data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17], row[18], row[19], row[20], row[21], row[22], row[23], row[24], row[25], row[26], row[27], row[28], row[29], row[30]))

		description = ''
		report = ArtifactHtmlReport('KnowledgeC Location Activity')
		report.start_artifact_report(report_folder, 'Location Activity', description)
		report.add_script()
		data_headers = ('Start','End','Bundle ID','Coordinates','Name','Display Name','Formatted Address','City','State/Province','Country','Postal Code','Subthoroughfare','Thoroughfare','Phone Numbers','URL','Activity Type','Content Description','User Activity Required String','Content URL','Unique ID','Latitude','Longitude','Source ID','Activity UUID','Item ID','Source ID','Day of the Week','GMT Offset','Entry Creation','UUID','Zobject Table ID')
		report.write_artifact_data_table(data_headers, data_list, file_found)
		report.end_artifact_report()
		
		tsvname = 'KnowledgeC Location Activity'
		tsv(report_folder, data_headers, data_list, tsvname)
		
		tlactivity = 'KnowledgeC Location Activity'
		timeline(report_folder, tlactivity, data_list, data_headers)
	else:
		logfunc('No data available for KnowledgeC Location Activity')

	db.close()
Example 10
def get_fitbitSocial(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    OWNING_USER_ID,
    ENCODED_ID,
    DISPLAY_NAME,
    AVATAR_URL,
    FRIEND,
    CHILD
    FROM FRIEND
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Fitbit Friends')
        report.start_artifact_report(report_folder, 'Fitbit Friends')
        report.add_script()
        data_headers = ('Owning UserID', 'Encoded ID', 'Display Name',
                        'Avatar URL', 'Friend', 'Child')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Fitbit Friends'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Fitbit Friend data available')

    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime("LAST_UPDATED"/1000, 'unixepoch'),
    DISPLAY_NAME,
    FULL_NAME,
    ABOUT_ME,
    AVATAR_URL,
    COVER_PHOTO_URL,
    CITY,
    STATE,
    COUNTRY,
    datetime("JOINED_DATE"/1000, 'unixepoch'),
    datetime("DATE_OF_BIRTH"/1000, 'unixepoch'),
    HEIGHT,
    WEIGHT,
    GENDER,
    COACH
    FROM USER_PROFILE
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Fitbit User Profile')
        report.start_artifact_report(report_folder, 'Fitbit User Profile')
        report.add_script()
        data_headers = ('Last Updated', 'Display Name', 'Full Name',
                        'About Me', 'Avatar URL', 'Cover Photo URL', 'City',
                        'State', 'Country', 'Joined Date', 'Date of Birth',
                        'Height', 'Weight', 'Gender', 'Coach')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Fitbit User Profile'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Fitbit User Profile'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Fitbit User Profile data available')

    db.close()
Example 11
def get_knowCusage(files_found, report_folder, seeker):
	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	cursor = db.cursor()

	cursor.execute(
	"""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",  
			ZSOURCE.ZDEVICEID AS "DEVICE ID (HARDWARE UUID)", 
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
			ZOBJECT.ZUUID AS "UUID",
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/app/usage" 
	""")
	all_rows = cursor.fetchall()
	usageentries = len(all_rows)
	data_list = []
	if usageentries > 0:    
		for row in all_rows:
			data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))

		description = ''
		report = ArtifactHtmlReport('KnowledgeC App Usage')
		report.start_artifact_report(report_folder, 'KnowledgeC App Usage', description)
		report.add_script()
		data_headers = ('Start','End','Bundle ID','Usage in Seconds','Usage in Minutes','Device ID','Day of the Week','GMT Offset','Entry Creation','UUID','Zobject Table ID' )     
		report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
		report.end_artifact_report()

		tsvname = 'KnowledgeC App Usage'
		tsv(report_folder, data_headers, data_list, tsvname)
	
		tlactivity = 'KnowledgeC App Usage'
		timeline(report_folder, tlactivity, data_list, data_headers)
	else:
		logfunc('No data available for KnowledgeC App Usage')
Example 12
def get_knowCwebusage(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgeC Web Usage on iOS " + iOSversion)
        return

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_app_webusage.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
		DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZOBJECT.ZVALUESTRING AS "APP NAME", 
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
		ZSTRUCTUREDMETADATA .Z_DKDIGITALHEALTHMETADATAKEY__WEBDOMAIN AS "DOMAIN",
		ZSTRUCTUREDMETADATA .Z_DKDIGITALHEALTHMETADATAKEY__WEBPAGEURL AS "URL",
		ZSOURCE.ZDEVICEID AS "DEVICE ID (HARDWARE UUID)",
		CASE ZOBJECT.ZSTARTDAYOFWEEK 
			WHEN "1" THEN "Sunday"
			WHEN "2" THEN "Monday"
			WHEN "3" THEN "Tuesday"
			WHEN "4" THEN "Wednesday"
			WHEN "5" THEN "Thursday"
			WHEN "6" THEN "Friday"
			WHEN "7" THEN "Saturday"
		END "DAY OF WEEK",
		ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
		DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
		ZOBJECT.ZUUID AS "UUID", 
		ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
	FROM
		ZOBJECT 
		LEFT JOIN
			ZSTRUCTUREDMETADATA 
			ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
		LEFT JOIN
			ZSOURCE 
			ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
	WHERE
		ZSTREAMNAME = "/app/webUsage" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Web Usage')
        report.start_artifact_report(report_folder, 'Web Usage', description)
        report.add_script()
        data_headers = ('Start', 'End', 'App Name', 'Usage in Seconds',
                        'Usage in Minutes', 'Domain', 'URL', 'Device ID',
                        'Day of the Week', 'GMT Offset', 'Entry Creation',
                        'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Web Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Web Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for KnowledgeC Web Usage')

    db.close()
    return
Example 13
def get_powerlogAirdrop(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
                DATETIME(AIRDROP_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
                STATE,
                SUBEVENT,
                BUNDLEID AS BUNDLE_ID,
                PID,
                DATETIME(AIRDROP_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_AIRDROP_TIMESTAMP,
                DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
                SYSTEM AS TIME_OFFSET,
                AIRDROP_ID AS "PLXPCAGENT_EVENTFORWARD_AIRDROP TABLE ID"
            FROM
                (
                SELECT
                    BUNDLEID,
                    AIRDROP_ID,
                    AIRDROP_TIMESTAMP,
                    TIME_OFFSET_TIMESTAMP,
                    MAX(TIME_OFFSET_ID) AS MAX_ID,
                    SYSTEM,
                    PID,
                    SUBEVENT,
                    STATE
                FROM
                    (
                SELECT
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.TIMESTAMP AS AIRDROP_TIMESTAMP,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.BUNDLEID,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.PID,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.SUBEVENT,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.STATE,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.ID AS "AIRDROP_ID",
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM,
                    BUNDLEID 
                FROM
                    PLXPCAGENT_EVENTFORWARD_AIRDROP 
                LEFT JOIN
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
                    )
                AS AIRDROPSTATE 
                GROUP BY
                    AIRDROP_ID 
                )
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport(
                    'Powerlog Airdrop Connections Info')
                report.start_artifact_report(report_folder,
                                             'Airdrop Connections Info')
                report.add_script()
                data_headers = ('Adjusted Timestamp', 'State', 'Subevent',
                                'Bundle ID', 'PID',
                                'Original Airdrop Timestamp',
                                'Offset Timestamp', 'Time Offset',
                                'Airdrop Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Airdrop Connections Info'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'Powerlog Airdrop Connections Info'
                timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No data available in Airdrop Connection Info')

        db.close()
        return
Example 14
def get_package_info(files_found, report_folder, seeker, wrap_text):
    packages = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(
                file_found):  # skip folders (there shouldn't be any)
            continue

        file_name = os.path.basename(file_found)
        if (checkabx(file_found)):
            multi_root = False
            tree = abxread(file_found, multi_root)
            xlmstring = (etree.tostring(tree.getroot()).decode())
            doc = xmltodict.parse(xlmstring)
        else:
            with open(file_found) as fd:
                doc = xmltodict.parse(fd.read())

        package_dict = doc.get('packages', {}).get('package', {})
        for package in package_dict:
            name = package.get('@name', '')
            ft = ReadUnixTimeMs(package.get('@ft', None))
            it = ReadUnixTimeMs(package.get('@it', None))
            ut = ReadUnixTimeMs(package.get('@ut', None))
            install_originator = package.get('@installOriginator', '')
            installer = package.get('@installer', '')
            code_path = package.get('@codePath', '')
            public_flags = hex(
                int(package.get('@publicFlags', 0)) & (2**32 - 1))
            private_flags = hex(
                int(package.get('@privateFlags', 0)) & (2**32 - 1))
            package = Package(name, ft, it, ut, install_originator, installer,
                              code_path, public_flags, private_flags)
            packages.append(package)

        if len(packages):
            break

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)
    entries = len(packages)
    if entries > 0:
        description = "All packages (user installed, OEM installed and system) appear here. Many of these are not user apps."
        report = ArtifactHtmlReport('Packages')
        report.start_artifact_report(report_folder, 'Packages', description)
        report.add_script()
        data_headers = ('File Timestamp (ft)', 'Name', 'Install Time',
                        'Update Time', 'Install Originator', 'Installer',
                        'Code Path', 'Public Flags', 'Private Flags')
        data_list = []
        for p in packages:
            data_list.append((p.ft, p.name, p.install_time, p.update_time,
                              p.install_originator, p.installer, p.code_path,
                              p.public_flags, p.private_flags))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Packages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Packages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No package data available')
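ReadUnixTimeMs is a shared helper; the attribute values arrive from xmltodict as strings (or None when absent), so it presumably tolerates both. A sketch consistent with how it is called here; note that packages.xml has historically stored ft/it/ut as hexadecimal millisecond epochs, so the numeric base is left as a parameter rather than assumed:

from datetime import datetime, timezone

def read_unix_time_ms_sketch(value, base=16):
    # Sketch only: None becomes an empty cell; otherwise parse the
    # millisecond epoch string and convert to an aware UTC datetime.
    if value is None:
        return ''
    return datetime.fromtimestamp(int(value, base) / 1000, tz=timezone.utc)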
Example 15
def process_recentactivity(folder, uid, report_folder):

    slash = '\\' if is_platform_windows() else '/'

    db = sqlite3.connect(
        os.path.join(report_folder, 'RecentAct_{}.db'.format(uid)))
    cursor = db.cursor()
    #Create table recent.
    cursor.execute('''
    CREATE TABLE 
    recent(task_id TEXT, effective_uid TEXT, affinity TEXT, real_activity TEXT, first_active_time TEXT, last_active_time TEXT,
    last_time_moved TEXT, calling_package TEXT, user_id TEXT, action TEXT, component TEXT, snap TEXT, recimg TEXT, fullat1 TEXT, fullat2 TEXT)
    ''')
    db.commit()
    err = 0
    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)

    for filename in glob.iglob(os.path.join(folder, 'recent_tasks', '**'),
                               recursive=True):
        if os.path.isfile(filename):  # filter dirs
            file_name = os.path.basename(filename)
            #logfunc(filename)
            #logfunc(file_name)
            #numid = file_name.split('_')[0]

            try:
                if (checkabx(filename)):
                    multi_root = False
                    tree = abxread(filename, multi_root)
                else:
                    tree = ET.parse(filename)
            except ET.ParseError:
                logfunc('Parse error - Non XML file? at: ' + filename)
                err = 1
                #print(filename)

            if err == 1:
                err = 0
                continue
            else:
                #tree = ET.parse(filename)
                root = tree.getroot()
                #print('Processed: '+filename)
                for child in root:
                    #All attributes. Get them in using json dump thing
                    fullat1 = json.dumps(root.attrib)
                    task_id = (root.attrib.get('task_id'))
                    effective_uid = (root.attrib.get('effective_uid'))
                    affinity = (root.attrib.get('affinity'))
                    real_activity = (root.attrib.get('real_activity'))
                    first_active_time = (root.attrib.get('first_active_time'))
                    last_active_time = (root.attrib.get('last_active_time'))
                    last_time_moved = (root.attrib.get('last_time_moved'))
                    calling_package = (root.attrib.get('calling_package'))
                    user_id = (root.attrib.get('user_id'))
                    #print(root.attrib.get('task_description_icon_filename'))

                    #All attributes. Get them in using json dump thing
                    fullat2 = json.dumps(child.attrib)
                    action = (child.attrib.get('action'))
                    component = (child.attrib.get('component'))
                    icon_image_path = (
                        root.attrib.get('task_description_icon_filename'))

                    #Snapshot section picture
                    snapshot = task_id + '.jpg'
                    #print(snapshot)

                    #check for image in directories
                    check1 = os.path.join(folder, 'snapshots', snapshot)
                    isit1 = os.path.isfile(check1)
                    if isit1:
                        #copy snaphot image to report folder
                        shutil.copy2(check1, report_folder)
                        #snap = r'./snapshots/' + snapshot
                        snap = snapshot
                    else:
                        snap = 'NO IMAGE'
                    #Recent_images section
                    if icon_image_path is not None:
                        recent_image = os.path.basename(icon_image_path)
                        check2 = os.path.join(folder, 'recent_images',
                                              recent_image)
                        isit2 = os.path.isfile(check2)
                        if isit2:
                            shutil.copy2(check2, report_folder)
                            #recimg = r'./recent_images/' + recent_image
                            recimg = recent_image
                        else:
                            recimg = 'NO IMAGE'
                    else:
                        #check for other files not in the XML - all types
                        check3 = glob.glob(
                            os.path.join(folder, 'recent_images', task_id,
                                         '*.*'))
                        if check3:
                            check3 = check3[0]
                            isit3 = os.path.isfile(check3)
                        else:
                            isit3 = 0

                        if isit3:
                            shutil.copy2(check3, report_folder)
                            recimg = os.path.basename(check3)
                        else:
                            recimg = 'NO IMAGE'
                    #else:
                    #    recimg = 'NO IMAGE'
                    #insert all items in database
                    cursor = db.cursor()
                    datainsert = (
                        task_id,
                        effective_uid,
                        affinity,
                        real_activity,
                        first_active_time,
                        last_active_time,
                        last_time_moved,
                        calling_package,
                        user_id,
                        action,
                        component,
                        snap,
                        recimg,
                        fullat1,
                        fullat2,
                    )
                    cursor.execute(
                        'INSERT INTO recent (task_id, effective_uid, affinity, real_activity, first_active_time, last_active_time, last_time_moved, calling_package, user_id, action, component, snap, recimg, fullat1, fullat2)  VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
                        datainsert)
                    db.commit()

    #Connect to the intermediate database built above
    db = sqlite3.connect(
        os.path.join(report_folder, 'RecentAct_{}.db'.format(uid)))
    cursor = db.cursor()

    #Query to create report
    cursor.execute('''
    SELECT 
        task_id as Task_ID, 
        effective_uid as Effective_UID, 
        affinity as Affinity, 
        real_activity as Real_Activity, 
        datetime(first_active_time/1000, 'UNIXEPOCH') as First_Active_Time, 
        datetime(last_active_time/1000, 'UNIXEPOCH') as Last_Active_Time,
        datetime(last_time_moved/1000, 'UNIXEPOCH') as Last_Time_Moved,
        calling_package as Calling_Package, 
        user_id as User_ID, 
        action as Action, 
        component as Component, 
        snap as Snapshot_Image, 
        recimg as Recent_Image
    FROM recent
    ''')
    all_rows = cursor.fetchall()
    colnames = cursor.description

    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Recent Tasks, Snapshots & Images')
        location = os.path.join(folder, 'recent_tasks')
        report.start_artifact_report(report_folder, f'Recent Activity_{uid}',
                                     f'Artifacts located at {location}')
        report.add_script()
        data_headers = ('Key', 'Value')
        image_data_headers = ('Snapshot_Image', 'Recent_Image')

        for row in all_rows:

            if row[2] is None:
                row2 = ''  #'NO DATA'
            else:
                row2 = row[2]

            report.write_minor_header(f'Application: {row2}')

            #do loop for headers
            data_list = []

            for x in range(0, 13):
                if row[x] is None:
                    pass
                else:
                    data_list.append((colnames[x][0], str(row[x])))

            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             folder,
                                             table_id='',
                                             write_total=False,
                                             write_location=False,
                                             cols_repeated_at_bottom=False)

            image_data_row = []
            image_data_list = [image_data_row]

            if row[11] == 'NO IMAGE':
                image_data_row.append('No Image')
            else:
                image_data_row.append(
                    '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid z-depth-2 zoom" style="max-height: 400px" title="{0}"></a>'
                    .format(str(row[11]), folder_name))
            if row[12] == 'NO IMAGE':
                image_data_row.append('No Image')
            else:
                image_data_row.append(
                    '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid z-depth-2 zoom" style="max-height: 400px" title="{0}"></a>'
                    .format(str(row[12]), folder_name))
            report.write_artifact_data_table(image_data_headers,
                                             image_data_list,
                                             folder,
                                             table_id='',
                                             table_style="width: auto",
                                             write_total=False,
                                             write_location=False,
                                             html_escape=False,
                                             cols_repeated_at_bottom=False)
            report.write_raw_html('<br />')

        report.end_artifact_report()
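
The report query above relies on SQLite's datetime(x/1000, 'UNIXEPOCH') to render the millisecond timestamps stored in the recent_tasks XML. A minimal sketch of the same conversion in plain Python, using a made-up first_active_time value, can be handy for spot-checking a single record:

import datetime

# Hypothetical value lifted from a recent_tasks XML attribute (ms since Unix epoch)
first_active_time = 1609459200000
# Equivalent of datetime(first_active_time/1000, 'UNIXEPOCH') in the query above
dt = datetime.datetime.fromtimestamp(first_active_time / 1000,
                                     tz=datetime.timezone.utc)
print(dt.strftime('%Y-%m-%d %H:%M:%S'))  # 2021-01-01 00:00:00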
Example #16
0
def get_locationDparked(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
           DATETIME(ZRTVEHICLEEVENTMO.ZDATE + 978307200, 'UNIXEPOCH') AS "DATE",
           DATETIME(ZRTVEHICLEEVENTMO.ZLOCDATE + 978307200, 'UNIXEPOCH') AS "LOCATION DATE",
           ZLOCLATITUDE || ", " || ZLOCLONGITUDE AS "COORDINATES",
           ZVEHICLEIDENTIFIER AS "VEHICLE IDENTIFIER",
           ZLOCUNCERTAINTY AS "LOCATION UNCERTAINTY",
           ZIDENTIFIER AS "IDENTIFIER",
           ZLOCATIONQUALITY AS "LOCATION QUALITY",
           ZUSERSETLOCATION AS "USER SET LOCATION",
           ZUSUALLOCATION AS "USUAL LOCATION",
           ZNOTES AS "NOTES",
           ZPHOTODATA AS "PHOTO DATA",
           ZLOCLATITUDE AS "LATITUDE",
           ZLOCLONGITUDE AS "LONGITUDE",
           ZRTVEHICLEEVENTMO.Z_PK AS "ZRTLEARNEDVISITMO TABLE ID" 
        FROM
           ZRTVEHICLEEVENTMO
        ''')
    else:
        cursor = db.cursor()
        cursor.execute('''
       SELECT
         DATETIME(ZRTVEHICLEEVENTMO.ZDATE + 978307200, 'UNIXEPOCH') AS "DATE",
         DATETIME(ZRTVEHICLEEVENTMO.ZLOCDATE + 978307200, 'UNIXEPOCH') AS "LOCATION DATE",
         ZLOCLATITUDE || ", " || ZLOCLONGITUDE AS "COORDINATES",
         ZVEHICLEIDENTIFIER AS "VEHICLE IDENTIFIER",
         ZLOCUNCERTAINTY AS "LOCATION UNCERTAINTY",
         ZIDENTIFIER AS "IDENTIFIER",
         ZLOCATIONQUALITY AS "LOCATION QUALITY",
         ZUSERSETLOCATION AS "USER SET LOCATION",
         ZUSUALLOCATION AS "USUAL LOCATION",
         ZNOTES AS "NOTES",
         ZGEOMAPITEM AS "GEO MAP ITEM",
         ZPHOTODATA AS "PHOTO DATA",
         ZLOCLATITUDE AS "LATITUDE",
         ZLOCLONGITUDE AS "LONGITUDE",
         ZRTVEHICLEEVENTMO.Z_PK AS "ZRTLEARNEDVISITMO TABLE ID" 
      FROM
         ZRTVEHICLEEVENTMO
            ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []

        if version.parse(iOSversion) >= version.parse("12"):

            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            report = ArtifactHtmlReport('RoutineD Vehicle Location')
            report.start_artifact_report(report_folder, 'Vehicle Location')
            report.add_script()
            data_headers = ('Date', 'Location Date', 'Coordinates',
                            'Vehicle Identifier', 'Location Uncertainty',
                            'Identifier', 'Location Quality',
                            'User Set Location', 'Usual Location', 'Notes',
                            'Photo Data', 'Latitude', 'Longitude', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Vehicle Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Vehicle Location'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13],
                                  row[14]))

            report = ArtifactHtmlReport('RoutineD Vehicle Location')
            report.start_artifact_report(report_folder, 'Vehicle Location')
            report.add_script()
            data_headers = ('Date', 'Location Date', 'Coordinates',
                            'Vehicle Identifier', 'Location Uncertainty',
                            'Identifier', 'Location Quality',
                            'User Set Location', 'Usual Location', 'Notes',
                            'Geo Map Item', 'Photo Data', 'Latitude',
                            'Longitude', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Vehicle Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Vehicle Location'
            timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in RoutineD Vehicle Location')

    db.close()
    return
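
The DATETIME(... + 978307200, 'UNIXEPOCH') pattern used throughout these queries converts Apple's Core Data timestamps (seconds since 2001-01-01 UTC) to Unix time. A minimal Python sketch of the same arithmetic, with a hypothetical ZDATE value:

import datetime

COCOA_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 (UTC)

def cocoa_to_utc(zdate):
    # Mirrors DATETIME(ZDATE + 978307200, 'UNIXEPOCH') from the queries above
    return datetime.datetime.fromtimestamp(zdate + COCOA_OFFSET,
                                           tz=datetime.timezone.utc)

print(cocoa_to_utc(600000000).strftime('%Y-%m-%d %H:%M:%S'))  # 2020-01-06 10:40:00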
Example #17
0
def get_knowCinfocus(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute('''
		SELECT
				DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
				DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
				ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
				ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONMETADATAKEY__LAUNCHREASON AS "LAUNCH REASON",
				(ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
				(ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",
				CASE ZOBJECT.ZSTARTDAYOFWEEK 
				    WHEN "1" THEN "Sunday"
				    WHEN "2" THEN "Monday"
				    WHEN "3" THEN "Tuesday"
				    WHEN "4" THEN "Wednesday"
				    WHEN "5" THEN "Thursday"
				    WHEN "6" THEN "Friday"
				    WHEN "7" THEN "Saturday"
				END "DAY OF WEEK",
				ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
				DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
				ZOBJECT.ZUUID AS "UUID",	
				ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
			FROM ZOBJECT
			LEFT JOIN
		         ZSTRUCTUREDMETADATA 
		         ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			WHERE ZSTREAMNAME IS "/app/inFocus"
			''')
    else:
        cursor = db.cursor()
        cursor.execute('''
			SELECT
				DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
				DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
				ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
				(ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
				(ZOBJECT.ZENDDATE-ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",
				CASE ZOBJECT.ZSTARTDAYOFWEEK 
				    WHEN "1" THEN "Sunday"
				    WHEN "2" THEN "Monday"
				    WHEN "3" THEN "Tuesday"
				    WHEN "4" THEN "Wednesday"
				    WHEN "5" THEN "Thursday"
				    WHEN "6" THEN "Friday"
				    WHEN "7" THEN "Saturday"
				END "DAY OF WEEK",
				ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
				DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",	
				ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
			FROM ZOBJECT
			WHERE ZSTREAMNAME IS "/app/inFocus"
					''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        if version.parse(iOSversion) >= version.parse("12"):

            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10]))

            report = ArtifactHtmlReport('KnowledgeC Application In Focus')
            report.start_artifact_report(report_folder, 'App In Focus')
            report.add_script()
            data_headers = ('Start', 'End', 'Bundle ID', 'Launch Reason',
                            'Usage in Seconds', 'Usage in Minutes',
                            'Day of Week', 'GMT Offset', 'Entry Creation',
                            'UUID', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Application in Focus'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Application in Focus'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8]))

            report = ArtifactHtmlReport('KnowledgeC Application In Focus')
            report.start_artifact_report(report_folder, 'App in Focus')
            report.add_script()
            data_headers = ('Start', 'End', 'Bundle ID', 'Usage in Seconds',
                            'Usage in Minutes', 'Day of Week', 'GMT Offset',
                            'Entry Creation', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Application in Focus'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Application in Focus'
            timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in KnowledgeC Application in Focus')

    db.close()
    return
Example #18
0
def get_applicationstate(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select ait.application_identifier as ai, kvs.value as compat_info,
    (SELECT kvs.value from kvs left join application_identifier_tab on application_identifier_tab.id = kvs.application_identifier
    left join key_tab on kvs.key = key_tab.id  
    WHERE key_tab.key='XBApplicationSnapshotManifest' and kvs.key = key_tab.id
    and application_identifier_tab.id = ait.id
    ) as snap_info
    from kvs 
    left join application_identifier_tab ait on ait.id = kvs.application_identifier
    left join key_tab on kvs.key = key_tab.id 
    where key_tab.key='compatibilityInfo' 
    order by ait.id
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        snap_info_list = []
        for row in all_rows:
            bundleid = str(row[0])
            plist = None  #avoid carrying a stale plist over from the previous row
            plist_file_object = io.BytesIO(row[1])
            if row[1].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError,
                        nd.biplist.NotBinaryPlistException,
                        nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException,
                        nd.ccl_bplist.BplistError, ValueError, TypeError,
                        OSError, OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[0]}, error was:' +
                            str(ex))
            if plist:
                if type(plist) is dict:
                    var1 = plist.get('bundleIdentifier', '')
                    var2 = plist.get('bundlePath', '')
                    var3 = plist.get('sandboxPath', '')
                    data_list.append((var1, var2, var3))
                    if row[2]:
                        snap_info_list.append((var1, var2, var3, row[2]))
                else:
                    logfunc(f'For {row[0]} Unexpected type "' +
                            str(type(plist)) +
                            '" found as plist root, can\'t process')
            else:
                logfunc(f'For {row[0]}, plist could not be read!')
        report = ArtifactHtmlReport('Application State')
        report.start_artifact_report(report_folder, 'Application State DB')
        report.add_script()
        data_headers = ('Bundle ID', 'Bundle Path', 'Sandbox Path')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Application State'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Application State data available')

    db.close()
    return
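
get_applicationstate branches on whether the stored blob is a plain plist or an NSKeyedArchiver keyed archive by scanning the blob for the literal marker string. A stripped-down sketch of that detection, assuming only the standard-library plistlib (the keyed-archive deserializer fallback used above is omitted):

import io
import plistlib

def load_compat_info(blob):
    # Keyed archives embed the bytes b'NSKeyedArchiver'; plain binary or XML
    # plists do not, so plistlib can load them directly.
    if blob.find(b'NSKeyedArchiver') == -1:
        return plistlib.load(io.BytesIO(blob))
    return None  # a keyed archive needs a dedicated deserializer, as above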
Example #19
0
def get_knowCmediaplaying(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("11"):
        logfunc("Unsupported version for KnowledgC Media Playing" + iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
		DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
		ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__ALBUM AS "NOW PLAYING ALBUM", 
		ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__ARTIST AS "NOW PLAYING ARTIST", 
		ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__GENRE AS "NOW PLAYING GENRE", 
		ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__TITLE AS "NOW PLAYING TITLE", 
		ZSTRUCTUREDMETADATA.Z_DKNOWPLAYINGMETADATAKEY__DURATION AS "NOW PLAYING DURATION",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",    
		CASE ZOBJECT.ZSTARTDAYOFWEEK 
			WHEN "1" THEN "Sunday"
			WHEN "2" THEN "Monday"
			WHEN "3" THEN "Tuesday"
			WHEN "4" THEN "Wednesday"
			WHEN "5" THEN "Thursday"
			WHEN "6" THEN "Friday"
			WHEN "7" THEN "Saturday"
		END "DAY OF WEEK",
		ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
		DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
		ZOBJECT.ZUUID AS "UUID",
		ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
	FROM
		ZOBJECT 
		LEFT JOIN
			ZSTRUCTUREDMETADATA 
			ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
		LEFT JOIN
			ZSOURCE 
			ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
	WHERE
		ZSTREAMNAME = "/media/nowPlaying"	
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Media Playing')
        report.start_artifact_report(report_folder, 'Media Playing',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Bundle ID', 'Now Playing Album',
                        'Now Playing Artist', 'Now Playing Genre',
                        'Now Playing Title', 'Now Playing Duration',
                        'Usage in Seconds', 'Usage in Minutes', 'Day of Week',
                        'GMT Offset', 'Entry Creation', 'UUID',
                        'ZOBJECT Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Media Playing'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Media Playing'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in KnowledgeC Media Playing')

    db.close()
    return
Example #20
0
def get_sbrowserCookies(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    host_key,
    name,
    value,
    CASE
        last_access_utc 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(last_access_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "last_access_utc", 
    CASE
        creation_utc 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(creation_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "creation_utc", 
    CASE
        expires_utc 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(expires_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "expires_utc", 
    path
    FROM
    cookies
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Browser Cookies')
        report.start_artifact_report(report_folder, 'Browser Cookies')
        report.add_script()
        data_headers = (
            'Host', 'Name', 'Value', 'Last Access Date', 'Created Date',
            'Expiration Date', 'Path'
        )
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], (textwrap.fill(row[2], width=50)), row[3],
                 row[4], row[5], row[6]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Browser cookies data available')

    db.close()
    return
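
The CASE expressions above convert Chrome/WebKit timestamps, which count microseconds since 1601-01-01 UTC and use 0 for "not set". A minimal Python equivalent, fed a hypothetical creation_utc value:

import datetime

WEBKIT_EPOCH = datetime.datetime(1601, 1, 1)

def webkit_to_utc(microseconds):
    # 0 means the cookie field was never set; the SQL above special-cases it too
    if microseconds == 0:
        return None
    return WEBKIT_EPOCH + datetime.timedelta(microseconds=microseconds)

print(webkit_to_utc(13254000000000000))  # hypothetical value -> 2021-01-01 18:40:00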
Example #21
0
def get_appleWalletPasses(files_found, report_folder, seeker):
    data_list = []
    #Initialize up front in case no .sqlite3 file is among files_found
    all_rows = []
    db = db_file = None
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('json'):
            unique_id = search(r'(?<=ards/)(.*?)(?=\.pkpass)',
                               dirname(file_found),
                               flags=DOTALL).group(0)
            filename = '{}_{}'.format(unique_id, basename(file_found))
            shutil.copyfile(file_found, join(report_folder, filename))

        json_files = [
            join(report_folder, file) for file in listdir(report_folder)
            if isfile(join(report_folder, file))
        ]

        if file_found.endswith('.sqlite3'):
            db = open_sqlite_db_readonly(file_found)
            cursor = db.cursor()
            cursor.execute(
                '''SELECT UNIQUE_ID, ORGANIZATION_NAME, TYPE_ID, LOCALIZED_DESCRIPTION, 
                            DATETIME(INGESTED_DATE + 978307200,'UNIXEPOCH'), DELETE_PENDING, ENCODED_PASS, 
                            FRONT_FIELD_BUCKETS, BACK_FIELD_BUCKETS
                            FROM PASS
                            ''')

            all_rows = cursor.fetchall()
            db_file = file_found

    if len(all_rows) > 0:
        for row in all_rows:
            for json_file in json_files:
                if row[0] in basename(json_file):

                    # noinspection PyBroadException
                    try:
                        with open(json_file) as json_content:
                            json_data = json.load(json_content)
                    except Exception:
                        json_data = 'Malformed data'

                    encoded_pass = str(row[6], 'utf-8', 'ignore')
                    front_field = str(row[7], 'utf-8', 'ignore')
                    back_field = str(row[8], 'utf-8', 'ignore')
                    data_list.append(
                        (row[0], row[1], row[2], row[3], row[4], row[5],
                         json_data, front_field, back_field, encoded_pass))

        report = ArtifactHtmlReport('Passes')
        report.start_artifact_report(report_folder, 'Passes')
        report.add_script()
        data_headers = ('Unique ID', 'Organization Name', 'Type',
                        'Localized Description', 'Pass Added',
                        'Pending Delete', 'Pass Details',
                        'Front Fields Content', 'Back Fields Content',
                        'Encoded Pass')
        report.write_artifact_data_table(data_headers, data_list, db_file)
        report.end_artifact_report()

        tsvname = 'Apple Wallet Passes'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Apple Wallet Passes'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Apple Wallet Passes available')

    if db:
        db.close()
    return
Example #22
0
def get_googleDuo(files_found, report_folder, seeker):

    for file_found in files_found:
        file_found = str(file_found)

        if not file_found.endswith('DataStore'):
            continue

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()

        cursor.execute('''
        select
        datetime(contact_reg_data_timestamp/1000000, 'unixepoch'),
        contact_name,
        contact_id,
        contact_number_label,
        datetime(contact_sync_date/1000000, 'unixepoch')
        from contact
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            description = 'Google Duo - Contacts'
            report = ArtifactHtmlReport('Google Duo - Contacts')
            report.start_artifact_report(report_folder,
                                         'Google Duo - Contacts')
            report.add_script()
            data_headers = (
                'Registration Date', 'Name', 'ID', 'Number Label', 'Sync Date'
            )
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Google Duo - Contacts'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Google Duo - Contacts'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Google Duo - Contacts data available')

        cursor.execute('''
        select
        datetime(call_history.call_history_timestamp, 'unixepoch'),
        call_history.call_history_local_user_id,
        call_history.call_history_other_user_id,
        contact.contact_name,
        strftime('%H:%M:%S',call_history.call_history_duration, 'unixepoch'),
        case call_history.call_history_is_outgoing_call
            when 0 then 'Incoming'
            when 1 then 'Outgoing'
        end,
        case call_history.call_history_is_video_call
            when 0 then ''
            when 1 then 'Yes'
        end
        from call_history
        left join contact on call_history.call_history_other_user_id = contact.contact_id
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            description = 'Google Duo - Call History'
            report = ArtifactHtmlReport('Google Duo - Call History')
            report.start_artifact_report(report_folder,
                                         'Google Duo - Call History')
            report.add_script()
            data_headers = (
                'Timestamp', 'Local User ID', 'Remote User ID', 'Contact Name',
                'Call Duration', 'Call Direction', 'Video Call?'
            )
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Google Duo - Call History'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Google Duo - Call History'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Google Duo - Call History data available')

        cursor.execute('''
        select
        datetime(media_clip_creation_date/1000000, 'unixepoch'),
        datetime(media_clip_message_date/1000000, 'unixepoch'),
        datetime(media_clip_viewed_date/1000000, 'unixepoch'),
        media_clip_local_id,
        case media_clip_source
            when 0 then 'Received'
            when 1 then 'Sent'
        end,
        media_clip_text_representation,
        media_clip_message_id,
        media_clip_md5_checksum,
        media_clip_content_size,
        media_clip_transferred_size
        from media_clip_v2
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []

        if usageentries > 0:
            for row in all_rows:

                clip_creation = row[0]
                clip_message = row[1]
                clip_viewed = row[2]
                local_id = row[3]
                clip_direction = row[4]
                text_rep = row[5]
                message_id = row[6]
                #row[7] is media_clip_md5_checksum, which is not reported
                content_size = row[8]
                transferred_size = row[9]
                thumb = ''

                clip_name = str(message_id) + '.png'
                #Check for Clips
                for match in files_found:
                    if clip_name in match:
                        shutil.copy2(match, report_folder)
                        data_file_name = os.path.basename(match)
                        thumb = f'<img src="{report_folder}/{data_file_name}" width="300"></img>'

                data_list.append(
                    (clip_creation, clip_message, clip_viewed, local_id,
                     clip_direction, text_rep, message_id, content_size,
                     transferred_size, thumb))

            description = 'Google Duo - Clips'
            report = ArtifactHtmlReport('Google Duo - Clips')
            report.start_artifact_report(report_folder, 'Google Duo - Clips')
            report.add_script()
            data_headers = (
                'Creation Date', 'Message Date', 'Viewed Date',
                'Local User ID', 'Clip Direction', 'Text Representation',
                'Message ID', 'Content Size', 'Transferred Size', 'Clip'
            )

            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_no_escape=['Clip'])
            report.end_artifact_report()

            tsvname = f'Google Duo - Clips'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Google Duo - Clips'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No Google Duo - Clips data available')

        db.close()
        return
Example #23
0
def get_locationDsteps(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("10"):
        cursor = db.cursor()
        cursor.execute("""
		SELECT 
		DATETIME(STARTTIME + 978307200, 'UNIXEPOCH') AS "START TIME",
		TIMESTAMP AS "MOVEMENT TIME",
		COUNT AS "COUNT", 
		DISTANCE AS "DISTANCE", 
		RAWDISTANCE AS "RAWDISTANCE",
		FLOORSASCENDED AS "FLOORS ASCENDED",
		FLOORSDESCENDED AS "FLOORS DESCENDED",
		PACE AS "PACE",
		ACTIVETIME AS "ACTIVE TIME",
		FIRSTSTEPTIME AS "FIRST STEP TIME",
		PUSHCOUNT AS "PUSH COUNT",
		WORKOUTTYPE AS "WORKOUT TYPE",
		STEPCOUNTHISTORY.ID AS "STEPCOUNTHISTORY TABLE ID"
		FROM STEPCOUNTHISTORY
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12]))

            description = ''
            report = ArtifactHtmlReport('LocationD Steps')
            report.start_artifact_report(report_folder, 'Steps', description)
            report.add_script()
            data_headers = ('Start Time', 'Movement Time', 'Count', 'Distance',
                            'Raw Distance', 'Floors Ascended',
                            'Floors Descended', 'Pace', 'Active Time',
                            'First Step Time', 'Push Count', 'Workout Type',
                            'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Steps'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available for Steps')

    elif version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute("""
		SELECT 
		DATETIME(STARTTIME + 978307200, 'UNIXEPOCH') AS "START TIME",
		TIMESTAMP AS "MOVEMENT TIME",
		COUNT AS "COUNT", 
		DISTANCE AS "DISTANCE", 
		RAWDISTANCE AS "RAWDISTANCE",
		FLOORSASCENDED AS "FLOORS ASCENDED",
		FLOORSDESCENDED AS "FLOORS DESCENDED",
		PACE AS "PACE",
		ACTIVETIME AS "ACTIVE TIME",
		STEPCOUNTHISTORY.ID AS "STEPCOUNTHISTORY TABLE ID"
		FROM STEPCOUNTHISTORY
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9]))

            description = ''
            report = ArtifactHtmlReport('LocationD Steps')
            report.start_artifact_report(report_folder, 'Steps', description)
            report.add_script()
            data_headers = ('Start Time', 'Movement Time', 'Count', 'Distance',
                            'Raw Distance', 'Floors Ascended',
                            'Floors Descended', 'Pace', 'Active Time',
                            'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Steps'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available for Steps')

    elif version.parse(iOSversion) >= version.parse("8"):
        cursor = db.cursor()
        cursor.execute("""
		SELECT 
		DATETIME(STARTTIME + 978307200, 'UNIXEPOCH') AS "START TIME",
		TIMESTAMP AS "MOVEMENT TIME",
		COUNT AS "COUNT", 
		DISTANCE AS "DISTANCE", 
		RAWDISTANCE AS "RAWDISTANCE",
		FLOORSASCENDED AS "FLOORS ASCENDED",
		FLOORSDESCENDED AS "FLOORS DESCENDED",
		STEPCOUNTHISTORY.ID AS "STEPCOUNTHISTORY TABLE ID"
		FROM STEPCOUNTHISTORY		
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7]))

            description = ''
            report = ArtifactHtmlReport('LocationD Steps')
            report.start_artifact_report(report_folder, 'Steps', description)
            report.add_script()
            data_headers = ('Start Time', 'Movement Time', 'Count', 'Distance',
                            'Raw Distance', 'Floors Ascended',
                            'Floors Descended', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Steps'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available for Steps')

    else:
        logfunc('No data available for Steps')

    db.close()
    return
Example #24
0
def get_tikTok(files_found, report_folder, seeker, wrap_text):
    data_list = []
    data_list1 = []

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('_im.db'):
            maindb = file_found
        if file_found.endswith('db_im_xx'):
            attachdb = file_found

    db = open_sqlite_db_readonly(maindb)
    cursor = db.cursor()
    cursor.execute(f"ATTACH DATABASE '{attachdb}' as db_im_xx;")
    cursor.execute('''
        select
        datetime(created_time/1000, 'unixepoch', 'localtime') as created_time,
        UID,
        UNIQUE_ID,
        NICK_NAME,
        json_extract(content, '$.text') as message,
        json_extract(content,'$.display_name') as links_gifs_display_name,
        json_extract(content, '$.url.url_list[0]') as links_gifs_urls,
        case when read_status = 0 then 'Not read'
            when read_status = 1 then 'Read'
            else read_status
        end as read_status,
        local_info
        from db_im_xx.SIMPLE_USER, msg
        where UID = sender order by created_time
        ''')

    all_rows = cursor.fetchall()

    if len(all_rows) > 0:
        for row in all_rows:

            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8]))

        report = ArtifactHtmlReport('TikTok Messages')
        report.start_artifact_report(report_folder, 'TikTok - Messages')
        report.add_script()
        data_headers = ('Timestamp', 'UID', 'Unique ID', 'Nickname', 'Message',
                        'Link GIF Name', 'Link GIF URL', 'Read?', 'Local Info')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Tiktok Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'TikTok Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No TikTok messages available')

    cursor.execute('''
        select
        UID,
        NICK_NAME,
        UNIQUE_ID,
        INITIAL_LETTER,
        json_extract(AVATAR_THUMB, '$.url_list[0]') as avatarURL,
        FOLLOW_STATUS 
        from SIMPLE_USER
        ''')

    all_rows1 = cursor.fetchall()

    if len(all_rows1) > 0:
        for row in all_rows1:

            data_list1.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report = ArtifactHtmlReport('TikTok Contacts')
        report.start_artifact_report(report_folder, 'TikTok - Contacts')
        report.add_script()
        data_headers1 = ('UID', 'Nickname', 'Unique ID', 'Initial Letter',
                         'Avatar URL', 'Follow Status')
        report.write_artifact_data_table(data_headers1, data_list1, file_found)
        report.end_artifact_report()

        tsvname = 'TikTok Contacts'
        tsv(report_folder, data_headers1, data_list1, tsvname)

    else:
        logfunc('No TikTok Contacts available')

    db.close()
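
get_tikTok joins rows across two SQLite files by attaching the second database to the first connection, which is why the query can reference db_im_xx.SIMPLE_USER directly. A self-contained sketch of the same ATTACH pattern using throwaway in-memory databases:

import sqlite3

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE msg (sender INTEGER, body TEXT)')
# Tables in the attached database become visible as other.<table>,
# just like db_im_xx.SIMPLE_USER above
db.execute("ATTACH DATABASE ':memory:' AS other")
db.execute('CREATE TABLE other.users (uid INTEGER, name TEXT)')
db.execute("INSERT INTO msg VALUES (1, 'hi')")
db.execute("INSERT INTO other.users VALUES (1, 'alice')")
for row in db.execute('SELECT name, body FROM msg, other.users WHERE uid = sender'):
    print(row)  # ('alice', 'hi')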
Example #25
0
def get_teamsSegment(files_found, report_folder, seeker):
    data_list_location = []
    data_list_motion = []
    data_list_timecheck = []
    data_list_power = []
    data_list_statechange = []

    for file_found in files_found:
        with open(file_found) as file:
            for line in file:
                serial = json.loads(line)
                timestamp = serial[0].replace('T', ' ')
                #print(serial[1])
                if serial[1] == 'location':
                    locationtimestamp = serial[2]['sourceTimestamp']
                    locationtimestamp = locationtimestamp.replace('T', ' ')
                    longitude = serial[2]['longitude']
                    latitude = serial[2]['latitude']
                    speed = serial[2]['speed']
                    altitude = serial[2]['altitude']
                    vertacc = serial[2]['verticalAccuracy']
                    horiacc = serial[2]['horizontalAccuracy']
                    data_list_location.append(
                        (locationtimestamp, longitude, latitude, speed,
                         altitude, vertacc, horiacc))

                if serial[1] == 'motion':
                    motionact = (serial[2]['activityName'])
                    data_list_motion.append((timestamp, motionact))

                if serial[1] == 'timeCheck':
                    tczone = serial[2]['timezone']
                    tcoffset = serial[2]['offset']
                    tcreason = serial[2]['reason']
                    data_list_timecheck.append(
                        (timestamp, tczone, tcoffset, tcreason))

                if serial[1] == 'power':
                    plugged = serial[2]['isPluggedIn']
                    batlvl = serial[2]['batteryLevel']
                    data_list_power.append((timestamp, plugged, batlvl))

                if serial[1] == 'stateChange':
                    agg = ' '
                    for a, b in serial[2].items():
                        agg = agg + (f'{a}: {b} ')
                    agg = agg.lstrip()
                    data_list_statechange.append((timestamp, agg))

    if len(data_list_location) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Locations')
        report.start_artifact_report(report_folder, 'Teams Locations')
        report.add_script()
        data_headers_location = ('Timestamp', 'Longitude', 'Latitude', 'Speed',
                                 'Altitude', 'Vertical Accuracy',
                                 'Horizontal Accuracy')
        report.write_artifact_data_table(data_headers_location,
                                         data_list_location, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Locations'
        tsv(report_folder, data_headers_location, data_list_location, tsvname)

        tlactivity = 'Microsoft Teams Locations'
        timeline(report_folder, tlactivity, data_list_location,
                 data_headers_location)

        kmlactivity = 'Microsoft Teams Locations'
        kmlgen(report_folder, kmlactivity, data_list_location,
               data_headers_location)
    else:
        logfunc('No Microsoft Teams Locations data')

    if len(data_list_motion) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Motion')
        report.start_artifact_report(report_folder, 'Teams Motion')
        report.add_script()
        data_headers_motion = ('Timestamp', 'Activity')
        report.write_artifact_data_table(data_headers_motion, data_list_motion,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Motion'
        tsv(report_folder, data_headers_motion, data_list_motion, tsvname)

        tlactivity = 'Microsoft Teams Motion'
        timeline(report_folder, tlactivity, data_list_motion,
                 data_headers_motion)

    else:
        logfunc('No Microsoft Teams Motion data')

    if len(data_list_timecheck) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Timezone')
        report.start_artifact_report(report_folder, 'Teams Timezone')
        report.add_script()
        data_headers_timecheck = ('Timestamp', 'Timezone', 'Timezone Offset',
                                  'Timezone reason')
        report.write_artifact_data_table(data_headers_timecheck,
                                         data_list_timecheck, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Timezone'
        tsv(report_folder, data_headers_timecheck, data_list_timecheck,
            tsvname)

        tlactivity = 'Microsoft Teams Timezone'
        timeline(report_folder, tlactivity, data_list_timecheck,
                 data_headers_timecheck)

    else:
        logfunc('No Microsoft Teams Timezone data')

    if len(data_list_power) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Power Log')
        report.start_artifact_report(report_folder, 'Teams Power Log')
        report.add_script()
        data_headers_power = ('Timestamp', 'Is plugged in?', 'Battery Level')
        report.write_artifact_data_table(data_headers_power, data_list_power,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Power Log'
        tsv(report_folder, data_headers_power, data_list_power, tsvname)

        tlactivity = 'Microsoft Teams Power Log'
        timeline(report_folder, tlactivity, data_list_power,
                 data_headers_power)

    else:
        logfunc('No Microsoft Teams Power Log data')

    if len(data_list_statechange) > 0:
        report = ArtifactHtmlReport('Microsoft Teams State Change')
        report.start_artifact_report(report_folder, 'Teams State Change')
        report.add_script()
        data_headers_statechange = ('Timestamp', 'Change')
        report.write_artifact_data_table(data_headers_statechange,
                                         data_list_statechange, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams State Change'
        tsv(report_folder, data_headers_statechange, data_list_statechange,
            tsvname)

        tlactivity = 'Microsoft Teams State Change'
        timeline(report_folder, tlactivity, data_list_statechange,
                 data_headers_statechange)

    else:
        logfunc('No Microsoft Teams State Change data')
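
Each Teams segment file parsed above is JSON-lines: every line holds an array of [timestamp, event type, payload]. A minimal sketch with a fabricated 'power' line, showing the shape the parser expects:

import json

# Fabricated sample line in the format get_teamsSegment consumes
line = '["2021-05-01T10:00:00", "power", {"isPluggedIn": true, "batteryLevel": 0.87}]'
serial = json.loads(line)
timestamp = serial[0].replace('T', ' ')
if serial[1] == 'power':
    print(timestamp, serial[2]['isPluggedIn'], serial[2]['batteryLevel'])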
Example #26
0
def get_playgroundVault(files_found, report_folder, seeker, wrap_text):

    data_list = []

    for file_found in files_found:
        file_found = str(file_found)

        #filesize = (getsize(file_found))

        if not isfile(file_found):
            continue
        filename = basename(file_found)
        if filename.startswith('._'):
            continue
        if filename.startswith('crypto.KEY_256.xml'):
            tree = ET.parse(file_found)
            root = tree.getroot()
            key = base64.b64decode(
                root.findall('./string[@name="cipher_key"]')[0].text)
            logfunc(f'Encryption key found: {key}')

    for file_found in files_found:
        file_found = str(file_found)

        if not isfile(file_found):
            continue
        filename = basename(file_found)
        if filename.startswith('._'):
            continue
        if filename.startswith('crypto.KEY_256.xml'):
            continue

        with open(file_found, 'rb') as openFile:
            #print('Attempting to decrypt...')
            fullFile = openFile.read()
            ### The 12-byte IV follows the first 2 bytes
            IV = fullFile[2:14]
            ### After the IV comes the encrypted data, minus the 16-byte GCM tag at the end
            encryptedData = fullFile[14:-16]
            ### Build the AES-GCM cipher from the recovered key and the per-file IV
            cipher = AES.new(key, AES.MODE_GCM, nonce=IV)
            ### Decrypt the data (the GCM tag is not verified here)
            decryptedData = cipher.decrypt(encryptedData)
            ### Determine the correct file extension
            fileExtension = filetype.guess(decryptedData)
            ### Open the new output file for writing

            with open(join(report_folder, basename(file_found)),
                      'wb') as decryptedFile:
                decryptedFile.write(decryptedData)

                tolink = []
                pathdec = join(report_folder, basename(file_found))
                tolink.append(pathdec)
                thumb = media_to_html(pathdec, tolink, report_folder)
                filename = basename(file_found)

                if 'EIF' in filename:
                    utctime = filename.split('EIF')
                    enctimestamp = datetime.datetime.fromtimestamp(
                        int(utctime[1]) / 1000)
                elif 'EVF' in filename:
                    utctime = filename.split('EVF')
                    enctimestamp = datetime.datetime.fromtimestamp(
                        int(utctime[1]) / 1000)
                else:
                    enctimestamp = ''

                data_list.append((thumb, filename, enctimestamp, file_found))

    if data_list:
        report = ArtifactHtmlReport('Playground Vault')
        report.start_artifact_report(report_folder, 'Playground Vault')
        report.add_script()
        data_headers = ('Media', 'Filename', 'Encrypted On Timestamp',
                        'Full Path')
        maindirectory = str(Path(file_found).parents[1])
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         maindirectory,
                                         html_no_escape=['Media'])
        report.end_artifact_report()

        tsvname = 'Playground Vault'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Playground Vault data available')
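
The vault parser above strips the trailing 16-byte tag and calls decrypt() without authenticating it. A variant sketch that also verifies the GCM tag, assuming the same 2-byte header / 12-byte IV / trailing-tag layout (PyCryptodome):

from Crypto.Cipher import AES

def decrypt_vault_file(data, key):
    # Layout assumed from the parser above: 2-byte header, 12-byte nonce,
    # ciphertext, then the 16-byte GCM authentication tag
    nonce, ciphertext, tag = data[2:14], data[14:-16], data[-16:]
    cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
    # Raises ValueError if the key is wrong or the data was altered
    return cipher.decrypt_and_verify(ciphertext, tag)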
Example #27
0
def get_filesAppsdb(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('server.db'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT * 
    FROM
    DEVICES
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:

        for row in all_rows:
            data_list.append((row[1], ))

        description = 'Device names that are able to sync to iCloud Drive.'
        report = ArtifactHtmlReport('Files App - iCloud Sync Names')
        report.start_artifact_report(report_folder,
                                     'Files App - iCloud Sync Names',
                                     description)
        report.add_script()
        data_headers = ('Name', )
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Files App - iCloud Sync Names'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Files App - iCloud Sync Names data available')

    cursor.execute('''
    SELECT
    item_birthtime,
    item_filename,
    version_mtime
    FROM
    server_items
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            birthtime = datetime.datetime.fromtimestamp(row[0])
            versionmtime = datetime.datetime.fromtimestamp(row[2])
            data_list.append((birthtime, row[1], versionmtime))

        description = ''
        report = ArtifactHtmlReport('Files App - iCloud Server Items')
        report.start_artifact_report(report_folder,
                                     'Files App - iCloud Server Items',
                                     description)
        report.add_script()
        data_headers = ('Birthtime', 'Filename', 'Version Modified Time')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Files App - iCloud Server Items'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Files App - iCloud Server Items'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Files App - iCloud Server Items data available')
Example #28
0
def get_mobileInstall(files_found, report_folder, seeker):
    counter = 0
    filescounter = 0
    tsv_tml_data_list = []

    mibdatabase = os.path.join(report_folder, 'mib.db')
    db = sqlite3.connect(mibdatabase)
    cursor = db.cursor()
    cursor.execute(
        """
    CREATE TABLE dimm(time_stamp TEXT, action TEXT, bundle_id TEXT, path TEXT)
    """
    )

    db.commit()

    for filename in files_found:
        file = open(filename, "r", encoding="utf8")
        filescounter = filescounter + 1
        for line in file:
            counter = counter + 1
            matchObj = re.search(
                r"(Install Successful for)", line
            )  # Regex for installed applications
            if matchObj:
                actiondesc = "Install successful"
                matchObj1 = re.search(
                    r"(?<= for \(Placeholder:)(.*)(?=\))", line
                )  # Regex for bundle id
                matchObj2 = re.search(
                    r"(?<= for \(Customer:)(.*)(?=\))", line
                )  # Regex for bundle id
                matchObj3 = re.search(
                    r"(?<= for \(System:)(.*)(?=\))", line
                )  # Regex for bundle id
                matchObj4 = re.search(
                    r"(?<= for \()(.*)(?=\))", line
                )  # Regex for bundle id
                if matchObj1:
                    bundleid = matchObj1.group(1)
                elif matchObj2:
                    bundleid = matchObj2.group(1)
                elif matchObj3:
                    bundleid = matchObj3.group(1)
                elif matchObj4:
                    bundleid = matchObj4.group(1)

                matchObj = re.search(r"(?<=^)(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = str.split(timestamp)
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                            str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )
                    # logfunc(inserttime)
                    # logfunc(month)
                    # logfunc(day)
                    # logfunc(year)
                    # logfunc(time)
                    # logfunc ("Timestamp: ", timestamp)

                # logfunc(inserttime, actiondesc, bundleid)

                # insert to database
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    bundleid,
                    "",
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path)  VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()
                path = ''
                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, path))

                # logfunc()

            matchObj = re.search(
                r"(Destroying container with identifier)", line
            )  # Regex for destroyed containers
            if matchObj:
                actiondesc = "Destroying container"
                # logfunc(actiondesc)
                # logfunc("Destroyed containers:")
                matchObj = re.search(
                    r"(?<=identifier )(.*)(?= at )", line
                )  # Regex for bundle id
                if matchObj:
                    bundleid = matchObj.group(1)
                    # logfunc ("Bundle ID: ", bundleid )

                matchObj = re.search(r"(?<=^)(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = str.split(timestamp)
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                            str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )
                    # logfunc(inserttime)
                    # logfunc(month)
                    # logfunc(day)
                    # logfunc(year)
                    # logfunc(time)
                    # logfunc ("Timestamp: ", timestamp)

                matchObj = re.search(r"(?<= at )(.*)(?=$)", line)  # Regex for path
                if matchObj:
                    path = matchObj.group(1)
                    # logfunc ("Path: ", matchObj.group(1))

                # logfunc(inserttime, actiondesc, bundleid, path)

                # insert to database
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    bundleid,
                    path,
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path)  VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()

                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, path))
                # logfunc()

            matchObj = re.search(
                r"(Data container for)", line
            )  # Regex for moved data containers
            if matchObj:
                actiondesc = "Data container moved"
                matchObj = re.search(
                    r"(?<=for )(.*)(?= is now )", line
                )  # Regex for bundle id
                if matchObj:
                    bundleid = matchObj.group(1)

                matchObj = re.search(r"^(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = timestamp.split()
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                        str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )

                matchObj = re.search(r"(?<= at )(.*)", line)  # Regex for path
                if matchObj:
                    path = matchObj.group(1)

                # insert to database
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    bundleid,
                    path,
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path) VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()

                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, path))

            matchObj = re.search(
                r"(Made container live for)", line
            )  # Regex for made container
            if matchObj:
                actiondesc = "Made container live"
                matchObj = re.search(
                    r"(?<=for )(.*)(?= at)", line
                )  # Regex for bundle id
                if matchObj:
                    bundleid = matchObj.group(1)

                matchObj = re.search(r"^(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = timestamp.split()
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                        str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )

                matchObj = re.search(r"(?<= at )(.*)", line)  # Regex for path
                if matchObj:
                    path = matchObj.group(1)

                # insert to database
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    bundleid,
                    path,
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path) VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()

                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, path))

            matchObj = re.search(
                r"(Uninstalling identifier )", line
            )  # Regex for uninstalled identifiers
            if matchObj:
                actiondesc = "Uninstalling identifier"
                matchObj = re.search(
                    r"(?<=Uninstalling identifier )(.*)", line
                )  # Regex for bundle id
                if matchObj:
                    bundleid = matchObj.group(1)

                matchObj = re.search(r"^(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = timestamp.split()
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                        str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )

                # insert to database (uninstall events carry no path)
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    bundleid,
                    "",
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path) VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()

                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, ""))

            matchObj = re.search(r"(main: Reboot detected)", line)  # Regex for reboots
            if matchObj:
                actiondesc = "Reboot detected"
                # logfunc(actiondesc)
                matchObj = re.search(r"(?<=^)(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = str.split(timestamp)
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                            str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )
                    # logfunc(inserttime)
                    # logfunc(month)
                    # logfunc(day)
                    # logfunc(year)
                    # logfunc(time)
                    # logfunc ("Timestamp: ", timestamp)

                # insert to database
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    "",
                    "",
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path)  VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()

                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, path))

            matchObj = re.search(
                r"(Attempting Delta patch update of )", line
            )  # Regex for Delta patch
            if matchObj:
                actiondesc = "Attempting Delta patch"
                matchObj = re.search(
                    r"(?<=Attempting Delta patch update of )(.*)(?= from)", line
                )  # Regex for bundle id
                if matchObj:
                    bundleid = matchObj.group(1)

                matchObj = re.search(r"^(.*)(?= \[)", line)  # Regex for timestamp
                if matchObj:
                    timestamp = matchObj.group(1)
                    weekday, month, day, time, year = timestamp.split()
                    day = day_converter(day)
                    month = month_converter(month)
                    inserttime = (
                        str(year) + "-" + str(month) + "-" + str(day) + " " + str(time)
                    )

                matchObj = re.search(r"(?<= from )(.*)", line)  # Regex for path
                if matchObj:
                    path = matchObj.group(1)

                # insert to database
                cursor = db.cursor()
                datainsert = (
                    inserttime,
                    actiondesc,
                    bundleid,
                    path,
                )
                cursor.execute(
                    "INSERT INTO dimm (time_stamp, action, bundle_id, path) VALUES(?,?,?,?)",
                    datainsert,
                )
                db.commit()

                tsv_tml_data_list.append((inserttime, actiondesc, bundleid, path))

    logfunc(f"Logs processed: {filescounter}")
    logfunc(f"Lines processed: {counter}")
    logfunc("")
    file.close()

    # Initialize counters
    totalapps = 0
    installedcount = 0
    uninstallcount = 0
    historicalcount = 0
    sysstatecount = 0

    # Create folder for the historical per-app reports
    os.makedirs(os.path.join(report_folder, "Apps_Historical"), exist_ok=True)

    data_list_installed = []
    data_list_uninstalled = []

    # Initialize database connection
    db = sqlite3.connect(mibdatabase)
    cursor = db.cursor()
    # Query to create installed and uninstalled app reports
    cursor.execute("""SELECT distinct bundle_id from dimm""")
    all_rows = cursor.fetchall()
    for row in all_rows:
        distinctbundle = row[0]
        cursor.execute(
            """SELECT * from dimm where bundle_id=? order by time_stamp desc limit 1""",
            (distinctbundle,),
        )
        all_rows_iu = cursor.fetchall()
        for row in all_rows_iu:
            if row[2] == "":
                continue
            elif row[1] in ("Destroying container", "Uninstalling identifier"):
                uninstallcount = uninstallcount + 1
                totalapps = totalapps + 1
                data_list_uninstalled.append((row[0], row[2],))
            else:
                data_list_installed.append((row[0], row[2],))
                installedcount = installedcount + 1
                totalapps = totalapps + 1

    location = f'{filename}'
    description = 'List of Uninstalled apps.'
    report = ArtifactHtmlReport('Apps - Uninstalled')
    report.start_artifact_report(report_folder, 'Apps - Uninstalled', description)
    report.add_script()
    data_headers = ('Last Uninstalled', 'Bundle ID',)
    report.write_artifact_data_table(data_headers, data_list_uninstalled, location)
    report.end_artifact_report()

    location = f'{filename}'
    description = 'List of Installed apps.'
    report = ArtifactHtmlReport('Apps - Installed')
    report.start_artifact_report(report_folder, 'Apps - Installed', description)
    report.add_script()
    data_headers = ('Last Installed', 'Bundle ID',)
    report.write_artifact_data_table(data_headers, data_list_installed, location)
    report.end_artifact_report()

    # Query to create historical report per app

    cursor.execute("""SELECT distinct bundle_id from dimm""")
    all_rows = cursor.fetchall()
    for row in all_rows:
        distinctbundle = row[0]
        if row[0] == "":
            continue
        else:
            f3 = open(os.path.join(report_folder, "Apps_Historical", distinctbundle + ".txt"),
                      "w+",
                      encoding="utf8"
                      )  # Create historical app report per app
            cursor.execute(
                """SELECT * from dimm where bundle_id=? order by time_stamp DESC""",
                (distinctbundle,),
            )  # Query to create app history per bundle_id
            all_rows_hist = cursor.fetchall()
            for row in all_rows_hist:
                tofile3 = row[0] + " " + row[1] + " " + row[2] + " " + row[3] + "\n"
                f3.write(tofile3)
        f3.close()
        historicalcount = historicalcount + 1

    data_list = []
    path = os.path.join(report_folder, "Apps_Historical")
    files = os.listdir(path)
    for name in files:
        bun = name
        appendval = (
            f'<a href = "./Mobile Installation Logs/Apps_Historical/{name}" style = "color:blue" target="content">Report</a>')
        data_list.append((bun, appendval))

    location = f'{filename}'
    description = 'Historical App report from the Mobile Installation Logs. All timestamps are in Local Time'
    report = ArtifactHtmlReport('Apps - Historical')
    report.start_artifact_report(report_folder, 'Apps - Historical', description)
    report.add_script()
    data_headers = ('Bundle ID', 'Report Link')
    report.write_artifact_data_table(data_headers, data_list, location, html_escape=False)
    report.end_artifact_report()

    tml_data_headers = ('Timestamp', 'Event', 'Bundle ID', 'Event Path')
    tsvname = 'Mobile Installation Logs - History'
    tsv(report_folder, tml_data_headers, tsv_tml_data_list, tsvname)
    tlactivity = 'Mobile Installation Logs - History'
    timeline(report_folder, tlactivity, tsv_tml_data_list, tml_data_headers)

    # All event historical in html report
    description = 'Historical App report from the Mobile Installation Logs. All timestamps are in Local Time'
    report = ArtifactHtmlReport('Apps - Historical')
    report.start_artifact_report(report_folder, 'Apps - Historical Combined', description)
    report.add_script()
    data_headers = ('Timestamp', 'Event', 'Bundle ID', 'Event Path')
    report.write_artifact_data_table(data_headers, tsv_tml_data_list, location)
    report.end_artifact_report()

    # Query to create system events
    data_list_reboots = []
    cursor.execute(
        """SELECT * from dimm where action ='Reboot detected' order by time_stamp DESC"""
    )
    all_rows = cursor.fetchall()
    for row in all_rows:
        data_list_reboots.append((row[0], row[1]))
        sysstatecount = sysstatecount + 1

    if len(all_rows) > 0:
        location = f'{filename}'
        description = 'Reboots detected in Local Time.'
        report = ArtifactHtmlReport('State - Reboots')
        report.start_artifact_report(report_folder, 'State - Reboots', description)
        report.add_script()
        data_headers_reboots = ('Timestamp (Local Time)', 'Description')
        report.write_artifact_data_table(data_headers_reboots, data_list_reboots, location)
        report.end_artifact_report()

        tsvname = 'Mobile Installation Logs - Reboots'
        tsv(report_folder, data_headers_reboots, data_list_reboots, tsvname)

    logfunc(f"Total apps: {totalapps}")
    logfunc(f"Total installed apps: {installedcount}")
    logfunc(f"Total uninstalled apps: {uninstallcount}")
    logfunc(f"Total historical app reports: {historicalcount}")
    logfunc(f"Total system state events: {sysstatecount}")

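A side note on the parser above: every event handler repeats the same timestamp-extraction block. A minimal refactoring sketch (hypothetical helper, not part of the original script; it assumes the same day_converter and month_converter helpers the script already uses):

import re

def parse_log_timestamp(line):
    # Pull the leading "Weekday Month Day Time Year" stamp that precedes
    # the first " [" in a mobile_installation log line; None if absent.
    match = re.search(r"^(.*)(?= \[)", line)
    if not match:
        return None
    weekday, month, day, time, year = match.group(1).split()
    # Emit the same "YYYY-MM-DD HH:MM:SS" string the handlers build inline.
    return f"{year}-{month_converter(month)}-{day_converter(day)} {time}"

# Illustrative only (sample line invented for this sketch):
# parse_log_timestamp("Wed Oct 30 11:54:13 2019 [1234] <Notice>: ...")
# -> "2019-10-30 11:54:13"

Each handler could then call parse_log_timestamp(line) once instead of repeating the block.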
Example No. 29
0
def get_knowCuserwaking(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC User Waking Event " +
                iOSversion)
        return

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_system_userwakingevent.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/system/userWakingEvent" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC User Waking Event')
        report.start_artifact_report(report_folder, 'User Waking Event',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Day of Week', 'GMT Offset',
                        'Entry Creation', 'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC User Waking Event'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC User Waking Event'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for KnowledgeC User Waking Event')

    db.close()
    return
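
For context, the 978307200 constant in these queries is the offset between the Unix epoch (1970-01-01) and Apple's Cocoa/Core Data epoch (2001-01-01), which KnowledgeC timestamps are stored against. A quick standalone check of the same conversion in Python:

from datetime import datetime, timezone

COCOA_EPOCH_OFFSET = 978307200  # seconds from 1970-01-01 to 2001-01-01 UTC

def cocoa_to_utc(cocoa_seconds):
    # Mirrors DATETIME(value + 978307200, 'UNIXEPOCH') from the SQL above.
    return datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET, tz=timezone.utc)

print(cocoa_to_utc(0))  # 2001-01-01 00:00:00+00:00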
Example No. 30
0
def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text):
    recents = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.jpg'):
            continue  # Skip jpg files, all others should be protobuf
        elif file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(file_found):  # skip folders
            continue

        with open(file_found, 'rb') as f:
            pb = f.read()
            types = {
                '1': {
                    'type': 'message',
                    'message_typedef': {
                        '1': {
                            'type': 'uint',
                            'name': 'id'
                        },
                        '4': {
                            'type': 'uint',
                            'name': 'timestamp1'
                        },
                        '5': {
                            'type': 'str',
                            'name': 'search-query'
                        },
                        '7': {
                            'type': 'message',
                            'message_typedef': {
                                '1': {
                                    'type': 'str',
                                    'name': 'url'
                                },
                                '2': {
                                    'type': 'str',
                                    'name': 'url-domain'
                                },
                                '3': {
                                    'type': 'str',
                                    'name': 'title'
                                }
                            },
                            'name': 'page'
                        },
                        '8': {
                            'type': 'message',
                            'message_typedef': {
                                '1': {
                                    'type': 'str',
                                    'name': 'category'
                                },
                                '2': {
                                    'type': 'str',
                                    'name': 'engine'
                                }
                            },
                            'name': 'search'
                        },
                        '9': {
                            'type': 'int',
                            'name': 'screenshot-id'
                        },
                        '17': {
                            'type': 'uint',
                            'name': 'timestamp2'
                        },
                    },
                    'name': ''
                }
            }
            values, types = blackboxprotobuf.decode_message(pb, types)
            items = values.get('1', None)
            if items:
                if isinstance(items, dict):
                    # this means only one element was found
                    # No array, just a dict of that single element
                    recents.append((file_found, [items]))
                else:
                    # Array of dicts found
                    recents.append((file_found, items))

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)
    recent_entries = len(recents)
    if recent_entries > 0:
        description = "Recently searched terms from the Google Search widget and webpages read from Google app (previously known as 'Google Now') appear here."
        report = ArtifactHtmlReport('Google Now & Quick Search recent events')
        report.start_artifact_report(report_folder,
                                     'Recent Searches & Google Now',
                                     description)
        report.add_script()
        data_headers = ('Screenshot', 'Protobuf Data')
        data_list = []
        for file_path, items in recents:
            dir_path, base_name = os.path.split(file_path)
            for item in items:
                screenshot_id = str(item.get('screenshot-id', ''))
                screenshot_file_path = os.path.join(
                    dir_path, f'{base_name}-{screenshot_id}.jpg')
                if os.path.exists(screenshot_file_path):
                    shutil.copy2(screenshot_file_path, report_folder)
                img_html = '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid" style="max-height:600px; min-width:300px" title="{0}"></a>'.format(
                    f'{base_name}-{screenshot_id}.jpg', folder_name)
                recursive_convert_bytes_to_str(
                    item)  # convert all 'bytes' to str
                data_list.append((
                    img_html, '<pre id="json" style="font-size: 110%">' +
                    escape(json.dumps(item, indent=4)).replace('\\n', '<br>') +
                    '</pre>'))

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         dir_path,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = 'google quick search box recent'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No recent quick search or now data available')
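
For readers unfamiliar with blackboxprotobuf: the typedef above only labels known field numbers, and the library can also decode a message without one, in which case every field keeps its numeric key. A minimal standalone sketch (the input path is hypothetical):

import blackboxprotobuf

# decode_message returns (values, typedef); with no typedef supplied,
# the library infers one from the wire format.
with open('recent_searches.pb', 'rb') as f:  # hypothetical input file
    values, typedef = blackboxprotobuf.decode_message(f.read())

for field_number, value in values.items():
    print(field_number, value)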