Example No. 1
# Assumed module-level imports for this snippet (not shown in the listing):
# glob, json, os, shutil, sqlite3; xml.etree.ElementTree as ET; and logfunc,
# is_platform_windows and ArtifactHtmlReport from the host project.
def process_recentactivity(folder, uid, report_folder):

    slash = '\\' if is_platform_windows() else '/'

    db = sqlite3.connect(
        os.path.join(report_folder, 'RecentAct_{}.db'.format(uid)))
    cursor = db.cursor()
    #Create table recent.
    cursor.execute('''
    CREATE TABLE 
    recent(task_id TEXT, effective_uid TEXT, affinity TEXT, real_activity TEXT, first_active_time TEXT, last_active_time TEXT,
    last_time_moved TEXT, calling_package TEXT, user_id TEXT, action TEXT, component TEXT, snap TEXT, recimg TEXT, fullat1 TEXT, fullat2 TEXT)
    ''')
    db.commit()
    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)

    for filename in glob.iglob(os.path.join(folder, 'recent_tasks', '**'),
                               recursive=True):
        if os.path.isfile(filename):  # filter dirs
            file_name = os.path.basename(filename)
            #logfunc(filename)
            #logfunc(file_name)
            #numid = file_name.split('_')[0]

            try:
                tree = ET.parse(filename)
            except ET.ParseError:
                logfunc('Parse error - Non XML file? at: ' + filename)
                continue
            else:
                root = tree.getroot()
                #print('Processed: '+filename)
                for child in root:
                    #Serialize all root attributes as JSON
                    fullat1 = json.dumps(root.attrib)
                    task_id = (root.attrib.get('task_id'))
                    effective_uid = (root.attrib.get('effective_uid'))
                    affinity = (root.attrib.get('affinity'))
                    real_activity = (root.attrib.get('real_activity'))
                    first_active_time = (root.attrib.get('first_active_time'))
                    last_active_time = (root.attrib.get('last_active_time'))
                    last_time_moved = (root.attrib.get('last_time_moved'))
                    calling_package = (root.attrib.get('calling_package'))
                    user_id = (root.attrib.get('user_id'))
                    #print(root.attrib.get('task_description_icon_filename'))

                    #Serialize all child (intent) attributes as JSON
                    fullat2 = json.dumps(child.attrib)
                    action = (child.attrib.get('action'))
                    component = (child.attrib.get('component'))
                    icon_image_path = (
                        root.attrib.get('task_description_icon_filename'))

                    #Snapshot section picture
                    snapshot = task_id + '.jpg'
                    #print(snapshot)

                    #check for image in directories
                    check1 = os.path.join(folder, 'snapshots', snapshot)
                    isit1 = os.path.isfile(check1)
                    if isit1:
                        #copy snapshot image to report folder
                        shutil.copy2(check1, report_folder)
                        #snap = r'./snapshots/' + snapshot
                        snap = snapshot
                    else:
                        snap = 'NO IMAGE'
                    #Recent_images section
                    if icon_image_path is not None:
                        recent_image = os.path.basename(icon_image_path)
                        check2 = os.path.join(folder, 'recent_images',
                                              recent_image)
                        isit2 = os.path.isfile(check2)
                        if isit2:
                            shutil.copy2(check2, report_folder)
                            #recimg = r'./recent_images/' + recent_image
                            recimg = recent_image
                        else:
                            recimg = 'NO IMAGE'
                    else:
                        #check for other files not in the XML - all types
                        check3 = glob.glob(
                            os.path.join(folder, 'recent_images', task_id,
                                         '*.*'))
                        if check3:
                            check3 = check3[0]
                            isit3 = os.path.isfile(check3)
                        else:
                            isit3 = 0

                        if isit3:
                            shutil.copy2(check3, report_folder)
                            recimg = os.path.basename(check3)
                        else:
                            recimg = 'NO IMAGE'
                    #else:
                    #    recimg = 'NO IMAGE'
                    #insert all items in database
                    cursor = db.cursor()
                    datainsert = (
                        task_id,
                        effective_uid,
                        affinity,
                        real_activity,
                        first_active_time,
                        last_active_time,
                        last_time_moved,
                        calling_package,
                        user_id,
                        action,
                        component,
                        snap,
                        recimg,
                        fullat1,
                        fullat2,
                    )
                    cursor.execute(
                        'INSERT INTO recent (task_id, effective_uid, affinity, real_activity, first_active_time, last_active_time, last_time_moved, calling_package, user_id, action, component, snap, recimg, fullat1, fullat2)  VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
                        datainsert)
                    db.commit()

    #Query to create report
    cursor = db.cursor()
    cursor.execute('''
    SELECT 
        task_id as Task_ID, 
        effective_uid as Effective_UID, 
        affinity as Affinity, 
        real_activity as Real_Activity, 
        datetime(first_active_time/1000, 'UNIXEPOCH') as First_Active_Time, 
        datetime(last_active_time/1000, 'UNIXEPOCH') as Last_Active_Time,
        datetime(last_time_moved/1000, 'UNIXEPOCH') as Last_Time_Moved,
        calling_package as Calling_Package, 
        user_id as User_ID, 
        action as Action, 
        component as Component, 
        snap as Snapshot_Image, 
        recimg as Recent_Image
    FROM recent
    ''')
    all_rows = cursor.fetchall()
    colnames = cursor.description

    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Recent Tasks, Snapshots & Images')
        location = os.path.join(folder, 'recent_tasks')
        report.start_artifact_report(report_folder, f'Recent Activity_{uid}',
                                     f'Artifacts located at {location}')
        report.add_script()
        data_headers = ('Key', 'Value')
        image_data_headers = ('Snapshot_Image', 'Recent_Image')

        for row in all_rows:

            if row[2] is None:
                row2 = ''  #'NO DATA'
            else:
                row2 = row[2]

            report.write_minor_header(f'Application: {row2}')

            #do loop for headers
            data_list = []

            for x in range(0, 13):
                if row[x] is None:
                    pass
                else:
                    data_list.append((colnames[x][0], str(row[x])))

            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             folder,
                                             table_id='',
                                             write_total=False,
                                             write_location=False,
                                             cols_repeated_at_bottom=False)

            image_data_row = []
            image_data_list = [image_data_row]

            if row[11] == 'NO IMAGE':
                image_data_row.append('No Image')
            else:
                image_data_row.append(
                    '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid z-depth-2 zoom" style="max-height: 400px" title="{0}"></a>'
                    .format(str(row[11]), folder_name))
            if row[12] == 'NO IMAGE':
                image_data_row.append('No Image')
            else:
                image_data_row.append(
                    '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid z-depth-2 zoom" style="max-height: 400px" title="{0}"></a>'
                    .format(str(row[12]), folder_name))
            report.write_artifact_data_table(image_data_headers,
                                             image_data_list,
                                             folder,
                                             table_id='',
                                             table_style="width: auto",
                                             write_total=False,
                                             write_location=False,
                                             html_escape=False,
                                             cols_repeated_at_bottom=False)
            report.write_raw_html('<br />')

        report.end_artifact_report()
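
A note on the timestamp handling above: the recent-tasks XML stores times as milliseconds since the Unix epoch, which is why the report query divides by 1000 before calling datetime(..., 'UNIXEPOCH'). A minimal Python equivalent for spot-checking a single value (the sample number is invented):

from datetime import datetime, timezone

def ms_to_utc(ms):
    # Android recent-task times are milliseconds since 1970-01-01 UTC.
    return datetime.fromtimestamp(int(ms) / 1000, tz=timezone.utc)

print(ms_to_utc(1609459200000))  # 2021-01-01 00:00:00+00:00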
Example No. 2
def get_locationDallB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_appharvest.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute("""
		SELECT
		DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
		BUNDLEID AS "BUNDLE ID",
		LATITUDE || ", " || LONGITUDE AS "COORDINATES",
		ALTITUDE AS "ALTITUDE",
		HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
		VERTICALACCURACY AS "VERTICAL ACCURACY",
		STATE AS "STATE",
		AGE AS "AGE",
		ROUTINEMODE AS "ROUTINE MODE",
		LOCATIONOFINTERESTTYPE AS "LOCATION OF INTEREST TYPE",
		HEX(SIG) AS "SIG (HEX)",
		LATITUDE AS "LATITUDE",
		LONGITUDE AS "LONGITUDE",
		SPEED AS "SPEED",
		COURSE AS "COURSE",
		CONFIDENCE AS "CONFIDENCE"
		FROM APPHARVEST
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15]))

            description = ''
            report = ArtifactHtmlReport('LocationD App Harvest')
            report.start_artifact_report(report_folder, 'App Harvest',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Bundle ID', 'Coordinates',
                            'Altitude', 'Horizontal Accuracy',
                            'Vertical Accuracy', 'State', 'Age',
                            'Routine Mode', 'Location of Interest Type',
                            'Sig (HEX)', 'Latitude', 'Longitude', 'Speed',
                            'Course', 'Confidence')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell App Harvest'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell App Harvest'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell App Harvest'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD App Harvest')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_cdmacelllocation.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	SID AS "SID",
	NID AS "NID",
	BSID AS "BSID",
	ZONEID AS "ZONEID",
	BANDCLASS AS "BANDCLASS",
	CHANNEL AS "CHANNEL",
	PNOFFSET AS "PNOFFSET",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CDMACELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13],
                 row[14], row[15], row[16], row[17]))

        description = ''
        report = ArtifactHtmlReport('LocationD CDMA Location')
        report.start_artifact_report(report_folder, 'CDMA Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'SID', 'NID',
                        'BSID', 'ZONEID', 'BANDCLASS', 'Channel', 'PNOFFSET',
                        'Altitude', 'Speed', 'Course', 'Confidence',
                        'Horizontal Accuracy', 'Vertical Accuracy', 'Latitude',
                        'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD CDMA Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD CDMA Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD CDMA Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD CDMA Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_celllocation.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP", 
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	LAC AS "LAC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PSC AS "PSC",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14], row[15]))

        description = ''
        report = ArtifactHtmlReport('LocationD Cell Location')
        report.start_artifact_report(report_folder, 'Cell Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'LAC', 'CI',
                        'UARFCN', 'PSC', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD Cell Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD Cell Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD Cell Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD Cell Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_ltecelllocation.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT 
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PID AS "PID",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM LTECELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        description = ''
        report = ArtifactHtmlReport('LocationD LTE Location')
        report.start_artifact_report(report_folder, 'LTE Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'CI',
                        'UARFCN', 'PID', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD LTE Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD LTE Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD LTE Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD LTE Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_wifilocation.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MAC AS "MAC",
	CHANNEL AS "CHANNEL",
	INFOMASK AS "INFOMASK",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	SCORE AS "SCORE",
	REACH AS "REACH",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MAC', 'Channel',
                        'Infomask', 'Speed', 'Course', 'Confidence', 'Score',
                        'Reach', 'Horizontal Accuracy', 'Vertical Accuracy',
                        'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')
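
Each query above adds 978307200 seconds to the stored TIMESTAMP before converting it with 'UNIXEPOCH'. That constant is the gap between the Unix epoch (1970-01-01) and Apple's Core Data/Cocoa epoch (2001-01-01), which locationd uses. A minimal sketch of the same conversion in Python (sample value invented):

from datetime import datetime, timezone

COCOA_EPOCH_OFFSET = 978307200  # seconds from 1970-01-01 to 2001-01-01 UTC

def cocoa_to_utc(cocoa_seconds):
    # locationd timestamps count seconds since 2001-01-01 00:00:00 UTC.
    return datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET, tz=timezone.utc)

print(cocoa_to_utc(0))  # 2001-01-01 00:00:00+00:00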
Example No. 3
def get_knowCdisksub(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("13"):
        pass
    else:
        logfunc("Unsupported version for KnowledgC Disk Subsystem on iOS " +
                iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZSOURCE.ZBUNDLEID AS "BUNDLE ID",
			ZOBJECT.ZVALUESTRING,
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",  
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK", 
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME is "/disk/subsystemAccess"
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Disk Subsystem Access')
        report.start_artifact_report(report_folder, 'Disk Subsystem Access',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Bundle ID', 'Value String',
                        'Usage in Seconds', 'Usage in Minutes', 'Day of Week',
                        'GMT Offset', 'Entry Creation', 'UUID',
                        'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Disk Subsystem Access'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

    db.close()
    return
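
The query above filters ZOBJECT on a single stream name, '/disk/subsystemAccess'. When adapting this module it can help to see which streams a particular knowledgeC.db actually holds; a minimal sketch, assuming the same ZOBJECT schema (the path is a placeholder):

import sqlite3

def list_streams(db_path):
    # Enumerate the distinct event streams recorded in a knowledgeC.db.
    db = sqlite3.connect(db_path)
    try:
        cursor = db.cursor()
        cursor.execute('SELECT ZSTREAMNAME, COUNT(*) FROM ZOBJECT '
                       'GROUP BY ZSTREAMNAME ORDER BY 2 DESC')
        return cursor.fetchall()
    finally:
        db.close()

for name, count in list_streams('knowledgeC.db'):  # placeholder path
    print(f'{count:8d}  {name}')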
Example No. 4
def get_knowCwebusage(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC Web Usage" + iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
		DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZOBJECT.ZVALUESTRING AS "APP NAME", 
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
		ZSTRUCTUREDMETADATA .Z_DKDIGITALHEALTHMETADATAKEY__WEBDOMAIN AS "DOMAIN",
		ZSTRUCTUREDMETADATA .Z_DKDIGITALHEALTHMETADATAKEY__WEBPAGEURL AS "URL",
		ZSOURCE.ZDEVICEID AS "DEVICE ID (HARDWARE UUID)",
		CASE ZOBJECT.ZSTARTDAYOFWEEK 
			WHEN "1" THEN "Sunday"
			WHEN "2" THEN "Monday"
			WHEN "3" THEN "Tuesday"
			WHEN "4" THEN "Wednesday"
			WHEN "5" THEN "Thursday"
			WHEN "6" THEN "Friday"
			WHEN "7" THEN "Saturday"
		END "DAY OF WEEK",
		ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
		DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
		ZOBJECT.ZUUID AS "UUID", 
		ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
	FROM
		ZOBJECT 
		LEFT JOIN
			ZSTRUCTUREDMETADATA 
			ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
		LEFT JOIN
			ZSOURCE 
			ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
	WHERE
		ZSTREAMNAME = "/app/webUsage" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Web Usage')
        report.start_artifact_report(report_folder, 'Web Usage', description)
        report.add_script()
        data_headers = ('Start', 'End', 'App Name', 'Usage in Seconds',
                        'Usage in Minutes', 'Domain', 'URL', 'Device ID',
                        'Day of the Week', 'GMT Offset', 'Entry Creation',
                        'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Web Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Web Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
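
The version gate at the top of this function depends on packaging's version.parse, which compares release numbers numerically; a plain string comparison would wrongly sort "9" after "12". A standalone sketch of the check:

from packaging import version

def supports_web_usage(ios_version):
    # The /app/webUsage stream only appears on iOS 12 and later.
    return version.parse(ios_version) >= version.parse("12")

print(supports_web_usage("9.3.5"))  # False
print(supports_web_usage("12.4"))   # True, even though "12.4" < "9" as strings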
Example No. 5
def get_tikTok(files_found, report_folder, seeker):
    data_list = []
    data_list1 = []
    maindb = ''
    attachdb = ''

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('db.sqlite'):
            maindb = file_found
        if file_found.endswith('AwemeIM.db'):
            attachdb = file_found

    db = open_sqlite_db_readonly(maindb)
    cursor = db.cursor()
    cursor.execute(f"ATTACH DATABASE '{attachdb}' as AwemeIM;")
    cursor.execute(
        "SELECT name FROM AwemeIM.sqlite_master WHERE type='table' LIMIT 1;")
    tablename = cursor.fetchall()
    for rown in tablename:
        tname = rown[0]

    cursor.execute(f'''
        select
            datetime(localcreatedat, 'unixepoch') as Local_Create_Time,
            sender,
            customid,
            nickname,
            json_extract(content, '$.text') as message,
            json_extract(content, '$.tips') as localresponse,
            json_extract(content,'$.display_name') as links_display_name,
            json_extract(content, '$.url.url_list[0]') as links_gifs_urls,
            case 
                when servercreatedat > 1 then datetime(servercreatedat, 'unixepoch')
                else servercreatedat
                end
            servercreatedat,
            url1 as profilepicURL
        from TIMMessageORM, {tname}
        where uid = sender
        order by Local_Create_Time
        ''')

    all_rows = cursor.fetchall()

    if len(all_rows) > 0:
        for row in all_rows:

            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9]))

        report = ArtifactHtmlReport('TikTok Messages')
        report.start_artifact_report(report_folder, 'TikTok Messages')
        report.add_script()
        data_headers = ('Timestamp', 'Sender', 'Custom ID', 'Nickname',
                        'Message', 'Local Response', 'Link GIF Name',
                        'Link GIF URL', 'Server Create Timestamps',
                        'Profile Pic URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Tiktok Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'TikTok Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No TikTok messages available')

    cursor.execute(f'''
        select 
            case 
                when latestchattimestamp > 1 then datetime(latestchattimestamp, 'unixepoch')
                else latestchattimestamp
            end
        latestchattimestamp,
        nickname,
        uid,
        customID,
        url1
        from {tname}
        ''')

    all_rows1 = cursor.fetchall()

    if len(all_rows1) > 0:
        description = 'Timestamp corresponds to latest chat if available'
        for row in all_rows1:

            data_list1.append((row[0], row[1], row[2], row[3], row[4]))

        report = ArtifactHtmlReport('TikTok Contacts')
        report.start_artifact_report(report_folder, 'TikTok Contacts',
                                     description)
        report.add_script()
        data_headers1 = ('Timestamp', 'Nickname', 'Unique ID', 'Custom ID',
                         'URL')
        report.write_artifact_data_table(data_headers1, data_list1, file_found)
        report.end_artifact_report()

        tsvname = 'TikTok Contacts'
        tsv(report_folder, data_headers1, data_list1, tsvname)

        tlactivity = 'TikTok Last Contact'
        timeline(report_folder, tlactivity, data_list1, data_headers1)

    else:
        logfunc('No TikTok Contacts available')

    db.close()
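
The join between TIMMessageORM and the contacts table works because SQLite can ATTACH a second database file to an open connection and query across both. A self-contained sketch of that mechanism, with invented file and table names:

import sqlite3

# Two throwaway databases standing in for db.sqlite and AwemeIM.db.
main = sqlite3.connect('main_demo.db')
main.execute('CREATE TABLE IF NOT EXISTS msg (sender TEXT, body TEXT)')
main.execute("INSERT INTO msg VALUES ('u1', 'hello')")

aux = sqlite3.connect('contacts_demo.db')
aux.execute('CREATE TABLE IF NOT EXISTS contact (uid TEXT, nickname TEXT)')
aux.execute("INSERT INTO contact VALUES ('u1', 'Alice')")
aux.commit()
aux.close()

# Attach the second file; its tables become visible under the alias "c".
main.execute("ATTACH DATABASE 'contacts_demo.db' AS c")
for row in main.execute('SELECT msg.sender, nickname, body '
                        'FROM msg JOIN c.contact ON c.contact.uid = msg.sender'):
    print(row)  # ('u1', 'Alice', 'hello')
main.close()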
Example No. 6
def get_protonmailMessages(files_found, report_folder, seeker, wrap_text):
    
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('MessagesDatabase.db'):
            continue # Skip all other files
        
        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(messagev3.Time,'unixepoch') AS 'Message Timestamp',
        messagev3.Subject AS 'Subject',
        messagev3.Sender_SenderSerialized AS 'Sender',
        CASE messagev3.Type
            WHEN 0 THEN 'Incoming'
            WHEN 2 THEN 'Outgoing'
        END AS 'Message Direction',
        CASE messagev3.Unread
            WHEN 0 THEN 'Read'
            WHEN 1 THEN 'Unread'
        END AS 'Status',
        messagev3.Size AS 'Message Size',
        CASE messagev3.AccessTime
            WHEN 0 THEN ''
            ELSE datetime(messagev3.AccessTime/1000,'unixepoch')
        END AS 'Accessed Timestamp',
        CASE messagev3.Location
            WHEN 0 THEN 'Inbox'
            WHEN 7 THEN 'Sent'
        END AS 'Folder',
        attachmentv3.file_name AS 'Attachment Name',
        attachmentv3.file_size AS 'Attachment Size',
        messagev3.ToList AS 'To List',
        messagev3.ReplyTos AS 'Reply To',
        messagev3.CCList AS 'CC List',
        messagev3.BCCList AS 'BCC List',
        messagev3.Header AS 'Message Header'
        FROM messagev3
        LEFT JOIN attachmentv3 ON attachmentv3.message_id = messagev3.ID
        ORDER BY messagev3.Time ASC
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('ProtonMail - Messages')
            report.start_artifact_report(report_folder, 'ProtonMail - Messages')
            report.add_script()
            data_headers = ('Message Timestamp','Subject','Sender','Message Direction','Status','Message Size','Accessed Timestamp','Folder','Attachment Name','Attachment Size','Message Header') 
            data_list = []
            for row in all_rows:
                data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[14]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = f'ProtonMail - Messages'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = f'ProtonMail - Messages'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No ProtonMail - Messages data available')
        
        db.close()
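
open_sqlite_db_readonly is a helper from the surrounding project; where it is not available, the standard library can open an evidence database read-only via a URI, which guards against accidental writes. A minimal sketch of that fallback (the project helper may differ in detail):

import sqlite3
from pathlib import Path

def open_readonly(path):
    # mode=ro makes the connection reject any write statement.
    return sqlite3.connect(f'file:{Path(path).as_posix()}?mode=ro', uri=True)

db = open_readonly('MessagesDatabase.db')  # placeholder path
try:
    db.execute('CREATE TABLE t (x)')       # will raise on a read-only connection
except sqlite3.OperationalError as e:
    print(e)                               # "attempt to write a readonly database"
db.close()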
Example No. 7
def get_notificationsXII(files_found, report_folder, seeker):

    bundle_info = get_bundle_info(files_found)
    data_list = []
    exportedbplistcount = 0

    pathfound = str(files_found[0])
    # logfunc(f'Posix to string is: {pathfound}')
    for filepath in glob.iglob(pathfound + "/**", recursive=True):
        # create directory where script is running from
        if os.path.isfile(filepath):  # filter dirs
            file_name = os.path.splitext(os.path.basename(filepath))[0]
            # create directory
            if filepath.endswith('DeliveredNotifications.plist'):
                bundle_id = os.path.basename(os.path.dirname(filepath))
                # open the plist
                with open(filepath, "rb") as p:
                    plist = nd.deserialize_plist(p)
                
                # Empty plist will be { 'root': None }
                if isinstance(plist, dict):
                    continue # skip it, it's empty
                
                # Good plist will be a list of dicts
                for item in plist:
                    creation_date = ''
                    title = ''
                    subtitle = ''
                    message = ''
                    other_dict = {}
                    bundle_name = bundle_info.get(bundle_id, bundle_id)
                    #if bundle_name == 'com.apple.ScreenTimeNotifications':
                    #    pass # has embedded plist!
                    for k, v in item.items():
                        if k == 'AppNotificationCreationDate': creation_date = str(v)
                        elif k == 'AppNotificationMessage': message = v
                        elif k == 'AppNotificationTitle': title = v
                        elif k == 'AppNotificationSubtitle': subtitle = v
                        else:
                            if isinstance(v, bytes):
                                logfunc(f'Found binary data, look into this one later k={k}!')
                            elif isinstance(v, dict):
                                pass # recurse look for plists #TODO
                            elif isinstance(v, list):
                                pass # recurse look for plists #TODO
                            other_dict[k] = str(v)
                    if subtitle:
                        title += f'[{subtitle}]'
                    data_list.append((creation_date, bundle_name, title, message, str(other_dict)))
                
            elif "AttachmentsList" in file_name:
                pass  # future development

    description = 'iOS > 12 Notifications'
    report = ArtifactHtmlReport('iOS Notifications')
    report.start_artifact_report(report_folder, 'iOS Notifications', description)
    report.add_script()
    data_headers = ('Creation Time', 'Bundle', 'Title[Subtitle]', 'Message', 'Other Details')
    report.write_artifact_data_table(data_headers, data_list, filepath)
    report.end_artifact_report()

    logfunc("Total notifications processed:" + str(len(data_list)))
    #logfunc("Total exported bplists from notifications:" + str(exportedbplistcount))
    
    tsvname = 'Notifications'
    tsv(report_folder, data_headers, data_list, tsvname)

    tlactivity = 'Notifications'
    timeline(report_folder, tlactivity, data_list, data_headers)
    if len(data_list) == 0:
        logfunc("No notifications found.")
Example No. 8
def get_line(files_found, report_folder, seeker, wrap_text):

    source_file_msg = ''
    source_file_call = ''
    line_call_db = ''
    line_msg_db = ''

    for file_found in files_found:

        file_name = str(file_found)
        if file_name.lower().endswith('naver_line'):
            line_msg_db = str(file_found)
            source_file_msg = file_found.replace(seeker.directory, '')

        if file_name.lower().endswith('call_history'):
            line_call_db = str(file_found)
            source_file_call = file_found.replace(seeker.directory, '')

    db = open_sqlite_db_readonly(line_msg_db)
    cursor = db.cursor()
    try:
        cursor.execute('''
                   SELECT m_id, server_name FROM   contacts
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Line - Contacts')
        report.start_artifact_report(report_folder, 'Line - Contacts')
        report.add_script()
        data_headers = (
            'user_id', 'user_name'
        )  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Line - Contacts'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_msg)

    else:
        logfunc('No Line Contact Logs found')

    try:
        cursor.execute('''
                    SELECT contact_book_w_groups.id, 
                           contact_book_w_groups.members, 
                           messages.from_mid, 
                           messages.content, 
                           messages.created_time/1000, 
                           messages.attachement_type, 
                           messages.attachement_local_uri, 
                           case messages.status when 1 then "Incoming" 
                           else "Outgoing" end status                           
                    FROM   (SELECT id, 
                                   Group_concat(M.m_id) AS members 
                            FROM   membership AS M 
                            GROUP  BY id 
                            UNION 
                            SELECT m_id, 
                                   NULL 
                            FROM   contacts) AS contact_book_w_groups 
                           JOIN chat_history AS messages 
                             ON messages.chat_id = contact_book_w_groups.id 
                    WHERE  attachement_type != 6
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Line - Messages')
        report.start_artifact_report(report_folder, 'Line - Messages')
        report.add_script()
        data_headers = (
            'Start Time', 'From ID', 'To ID', 'Direction', 'Thread ID',
            'Message', 'Attachments'
        )  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
        data_list = []
        for row in all_rows:
            thread_id = None
            if row[1] is None:
                thread_id = row[0]
            to_id = None
            if row[7] == "Outgoing":  # status is column 7; column 4 is the timestamp
                if row[1] and ',' in row[1]:  # comma-separated list means a group chat
                    to_id = row[1]
                else:
                    to_id = row[0]
            attachment = row[6]
            if row[6] is None:
                attachment = None
            elif 'content' in row[6]:
                attachment = None
            created_time = datetime.datetime.fromtimestamp(int(
                row[4])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append((created_time, row[2], to_id, row[7], thread_id,
                              row[3], attachment))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Line - Messages'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_msg)

        tlactivity = f'Line - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Line Messages available')

    db.close()

    db = open_sqlite_db_readonly(line_call_db)
    cursor = db.cursor()
    cursor.execute(f'ATTACH DATABASE "{line_msg_db}" AS naver_line')
    try:
        cursor.execute('''
                    SELECT case Substr(calls.call_type, -1) when "O" then "Outgoing"
                           else "Incoming" end AS direction, 
                           calls.start_time/1000              AS start_time, 
                           calls.end_time/1000                AS end_time, 
                           case when Substr(calls.call_type, -1) = "O" then contact_book_w_groups.members 
                           else null end AS group_members,  
                           calls.caller_mid, 
                           case calls.voip_type when "V" then "Video" 
                              when "A" then "Audio"
                              when "G" then calls.voip_gc_media_type 
                           end   AS call_type
                    FROM   (SELECT id, 
                                   Group_concat(M.m_id) AS members 
                            FROM   membership AS M 
                            GROUP  BY id 
                            UNION 
                            SELECT m_id, 
                                   NULL 
                            FROM   naver_line.contacts) AS contact_book_w_groups 
                           JOIN call_history AS calls 
                             ON calls.caller_mid = contact_book_w_groups.id

        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Line - Call Logs')
        report.start_artifact_report(report_folder, 'Line - Call Logs')
        report.add_script()
        data_headers = (
            'Start Time', 'End Time', 'To ID', 'From ID', 'Direction',
            'Call Type'
        )  # Don't remove the comma, that is required to make this a tuple as there is only 1 element
        data_list = []
        for row in all_rows:
            start_time = datetime.datetime.fromtimestamp(int(
                row[1])).strftime('%Y-%m-%d %H:%M:%S')
            end_time = datetime.datetime.fromtimestamp(int(
                row[2])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append(
                (start_time, end_time, row[3], row[4], row[0], row[5]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Line - Call Logs'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_call)

        tlactivity = f'Line - Call Logs'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Line Call Logs found')

    db.close()

    return
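
datetime.datetime.fromtimestamp, as used above, renders the Line timestamps in the machine's local time zone. When a report must line up with UTC-based artifacts, an explicit UTC conversion removes the ambiguity; a small sketch (sample value invented):

import datetime

def epoch_to_utc_string(seconds):
    # Render an epoch-seconds value as a UTC timestamp string.
    dt = datetime.datetime.fromtimestamp(int(seconds), tz=datetime.timezone.utc)
    return dt.strftime('%Y-%m-%d %H:%M:%S')

print(epoch_to_utc_string(1577880000))  # 2020-01-01 12:00:00 (UTC)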
Example No. 9
# Assumed imports for this snippet (not shown in the listing): sqlite3 helpers
# from the host project and "from itertools import chain", used by the
# daily-steps aggregation below.
def get_healthAll(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(samples.start_date + 978307200, 'unixepoch'),
        datetime(samples.end_date + 978307200, 'unixepoch'),
        metadata_values.numerical_value,
        case workouts.activity_type 
        when 63 then "high intensity interval training (hiit)" 
        when 37 then "indoor / outdoor run" 
        when 3000 then "other" 
        when 52 then "indoor / outdoor walk" 
        when 20 then "functional training" 
        when 13 then "indoor cycle" 
        when 16 then "elliptical" 
        when 35 then "rower"
        else "unknown"
        end, 
        workouts.duration / 60.00, 
        workouts.total_energy_burned, 
        workouts.total_distance,
        workouts.total_distance*0.621371, 
        case workouts.goal_type 
        when 2 then "minutes" 
        when 0 then "open" 
        end, 
        workouts.goal, 
        workouts.total_flights_climbed, 
        workouts.total_w_steps
        from
        samples, metadata_values, metadata_keys, workouts 
        where metadata_values.object_id = samples.data_id 
        and metadata_keys.rowid = metadata_values.key_id 
        and  workouts.data_id = samples.data_id 
        and workouts.activity_type not null and key is "_HKPrivateWorkoutAverageHeartRate"
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11]))

            report = ArtifactHtmlReport('Health Workout Cadence')
            report.start_artifact_report(report_folder, 'Workout Cadence')
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Strides per Min.',
                            'Workout Type', 'Duration in Mins.',
                            'Calories Burned', 'Distance in KM',
                            'Distance in Miles', 'Goal Type', 'Goal',
                            'Flights Climbed', 'Steps')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Workout Cadence'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Workout Cadence'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No data available in Workout Cadence')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute("""
        SELECT
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH'),
        DATETIME(SAMPLES.END_DATE + 978307200, 'UNIXEPOCH'),
        QUANTITY,
        QUANTITY*3.28084,
        (SAMPLES.END_DATE-SAMPLES.START_DATE)
        FROM
        SAMPLES INNER JOIN QUANTITY_SAMPLES ON SAMPLES.DATA_ID = QUANTITY_SAMPLES.DATA_ID 
        WHERE
        SAMPLES.DATA_TYPE = 8
        """)

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries == 0:
            logfunc('No data available in Distance')
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4]))

            description = ''
            report = ArtifactHtmlReport('Health Distance')
            report.start_artifact_report(report_folder, 'Distance',
                                         description)
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Distance in Meters',
                            'Distance in Feet', 'Time in Seconds')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Distance'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Distance'
            timeline(report_folder, tlactivity, data_list, data_headers)

    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute("""
        select
        datetime(samples.start_date + 978307200, 'unixepoch'),
        datetime(samples.end_date + 978307200, 'unixepoch'),
        metadata_values.numerical_value,
        (samples.end_date-samples.start_date)
        from
        samples 
        left join
        metadata_values 
        on metadata_values.object_id = samples.data_id 
        left join
        metadata_keys 
        on metadata_keys.rowid = metadata_values.key_id 
        left join
        workouts 
        on workouts.data_id = samples.data_id 
        where
        key is "_HKPrivateMetadataKeyElectrocardiogramHeartRate"
        """)

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries == 0:
            logfunc('No data available in ECG Avg. Heart Rate')
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            description = ''
            report = ArtifactHtmlReport('Health ECG Avg Heart Rate')
            report.start_artifact_report(report_folder, 'ECG Avg. Heart Rate',
                                         description)
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'ECG Avg. Heart Rate',
                            'Time in Seconds')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health ECG Avg Heart Rate'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health ECG Avg Heart Rate'
            timeline(report_folder, tlactivity, data_list, data_headers)

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute("""
        select
        datetime(samples.start_date + 978307200, 'unixepoch'),
        datetime(samples.end_date + 978307200, 'unixepoch'),
        quantity,
        (samples.end_date-samples.start_date)
        from
        samples, quantity_samples 
        where samples.data_id = quantity_samples.data_id 
        and samples.data_type = 12
        """)

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries == 0:
            logfunc('No data available in Flights Climbed')
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            description = ''
            report = ArtifactHtmlReport('Health Flights Climbed')
            report.start_artifact_report(report_folder, 'Flights Climbed',
                                         description)
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Flights Climbed',
                            'Time in Seconds')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Health Flights Climbed'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Flights Climbed'
            timeline(report_folder, tlactivity, data_list, data_headers)

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute("""
        select
        datetime(samples.start_date + 978307200, 'unixepoch'),
        original_quantity, 
        unit_strings.unit_string,
        quantity
        from
        samples, quantity_samples, unit_strings 
        where samples.data_type = 5
        and quantity_samples.original_unit = unit_strings.rowid 
        and samples.data_id = quantity_samples.data_id 
        """)

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries == 0:
            logfunc('No data available in Heart Rate')
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            description = ''
            report = ArtifactHtmlReport('Health Heart Rate')
            report.start_artifact_report(report_folder, 'Heart Rate',
                                         description)
            report.add_script()
            data_headers = ('Date', 'Heart Rate', 'Units', 'Quantity')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Heart Rate'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Heart Rate'
            timeline(report_folder, tlactivity, data_list, data_headers)

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(samples.start_date + 978307200, 'unixepoch'),
        datetime(samples.end_date + 978307200, 'unixepoch'),
        quantity,
        (samples.end_date-samples.start_date)
        from
        samples, quantity_samples 
        where samples.data_type = 75
        and samples.data_id = quantity_samples.data_id 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            report = ArtifactHtmlReport('Health Stood Up')
            report.start_artifact_report(report_folder, 'Stood Up')
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Stood Up',
                            'Time in Seconds')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Stood Up'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Stood Up'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No data available in Stood Up')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(SAMPLES.START_DATE + 978307200, 'unixepoch'),
        datetime(SAMPLES.END_DATE + 978307200, 'unixepoch'),
        QUANTITY_SAMPLES.QUANTITY,
        (SAMPLES.END_DATE - SAMPLES.START_DATE),
        DATA_PROVENANCES.ORIGIN_PRODUCT_TYPE
        FROM SAMPLES, QUANTITY_SAMPLES, DATA_PROVENANCES, OBJECTS
        WHERE SAMPLES.DATA_TYPE = 7 
            AND SAMPLES.DATA_ID = QUANTITY_SAMPLES.DATA_ID
            AND SAMPLES.DATA_ID = OBJECTS.DATA_ID
            AND OBJECTS.PROVENANCE = data_provenances.ROWID  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            daily_steps_nested_list = []

            c = 0
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4]))

                date, hour = row[0].split(' ')

                if date not in chain(*daily_steps_nested_list):
                    daily_steps_nested_list.append([date, row[2], row[3]])
                else:
                    for entry in daily_steps_nested_list:
                        if entry[0] == date:
                            entry[1] += row[2]
                            entry[2] += row[3]

            daily_steps_list = [tuple(t) for t in daily_steps_nested_list]

            report = ArtifactHtmlReport('Health Steps')
            report.start_artifact_report(report_folder, 'Steps')
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Steps',
                            'Time in Seconds', 'Origin')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Steps'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Steps'
            timeline(report_folder, tlactivity, data_list, data_headers)

            description = 'Cumulative total of the steps gathered from all available sources for each day'
            report = ArtifactHtmlReport('Health Steps per Day')
            report.start_artifact_report(report_folder, 'Steps per Day',
                                         description)
            report.add_script()
            data_headers = ('Date', 'Steps', 'Time in Seconds')
            report.write_artifact_data_table(data_headers, daily_steps_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Steps per Day'
            tsv(report_folder, data_headers, daily_steps_list, tsvname)

            tlactivity = 'Health Steps per Day'
            timeline(report_folder, tlactivity, daily_steps_list, data_headers)

        else:
            logfunc('No data available in Steps')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH'),
        QUANTITY,
        QUANTITY*2.20462
        FROM
        SAMPLES , QUANTITY_SAMPLES 
        WHERE SAMPLES.DATA_TYPE = 3 
        AND "DATE" IS  NOT NULL
        and SAMPLES.DATA_ID = QUANTITY_SAMPLES.DATA_ID 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2]))

            report = ArtifactHtmlReport('Health Weight')
            report.start_artifact_report(report_folder, 'Weight')
            report.add_script()
            data_headers = ('Date', 'Weight in KG', 'Weight in LBS')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Weight'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Weight'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No data available in Weight')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(samples.start_date + 978307200, 'unixepoch'),
        datetime(samples.end_date + 978307200, 'unixepoch'),
        metadata_values.numerical_value/100.00,
        (metadata_values.numerical_value/100.00)*3.28084,
        case workouts.activity_type 
        when 63 then "high intensity interval training (hiit)" 
        when 37 then "indoor / outdoor run" 
        when 3000 then "other" 
        when 52 then "indoor / outdoor walk" 
        when 20 then "functional training" 
        when 13 then "indoor cycle" 
        when 16 then "elliptical" 
        when 35 then "rower"
        else "unknown"
        end, 
        workouts.duration / 60.00, 
        workouts.total_energy_burned, 
        workouts.total_distance,
        workouts.total_distance*0.621371, 
        case workouts.goal_type 
        when 2 then "minutes" 
        when 0 then "open" 
        end, 
        workouts.goal, 
        workouts.total_flights_climbed, 
        workouts.total_w_steps
        from
        samples, metadata_values, metadata_keys, workouts 
        where metadata_values.object_id = samples.data_id 
        and metadata_keys.rowid = metadata_values.key_id 
        and  workouts.data_id = samples.data_id 
        and workouts.activity_type not null and (key is null or key is "HKIndoorWorkout")
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12]))

            report = ArtifactHtmlReport('Health Workout General')
            report.start_artifact_report(report_folder, 'Workout General')
            report.add_script()
            # headers aligned 1:1 with the SELECT column order above; the two
            # metadata_values columns are converted cm -> m and m -> ft, so
            # they are labeled as elevation here
            data_headers = ('Start Date', 'End Date', 'Elevation in Meters',
                            'Elevation in Feet', 'Workout Type',
                            'Duration in Min.', 'Total Energy Burned',
                            'Distance in KM', 'Distance in Miles', 'Goal Type',
                            'Goal', 'Flights Climbed', 'Steps')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Health Workout General'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Health Workout General'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No data available in Workout General')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT 
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH'),
        DATETIME(SAMPLES.END_DATE + 978307200, 'UNIXEPOCH'),
        cast(ROUND(QUANTITY) as int)
        FROM SAMPLES, QUANTITY_SAMPLES
        WHERE SAMPLES.DATA_TYPE = 172 AND SAMPLES.DATA_ID = QUANTITY_SAMPLES.DATA_ID
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2]))

            report = ArtifactHtmlReport('Environmental Sound Levels')
            report.start_artifact_report(report_folder,
                                         'Environmental Sound Levels')
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Sound in dBA SPL')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Environmental Sound Levels'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Environmental Sound Levels'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No data available in Environmental Sound Levels')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT 
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH'),
        DATETIME(SAMPLES.END_DATE + 978307200, 'UNIXEPOCH'),
        (SAMPLES.END_DATE - SAMPLES.START_DATE)
        FROM SAMPLES
        WHERE SAMPLES.DATA_TYPE = 63
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2]))

            report = ArtifactHtmlReport('Sleep Analysis')
            report.start_artifact_report(report_folder, 'Sleep Analysis')
            report.add_script()
            data_headers = ('Start Date', 'End Date', 'Time in Seconds')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Sleep Analysis'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Sleep Analysis'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No data available in Sleep Analysis')
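
Note on the recurring + 978307200 in these queries: Apple's healthdb (like Core Data generally) stores timestamps as seconds since 2001-01-01 00:00:00 UTC, and 978307200 is the Unix timestamp of that instant, so the addition shifts the value onto the Unix epoch that SQLite's datetime(..., 'unixepoch') expects. A minimal standalone sketch of the same conversion in Python (the function name and sample value are made up for illustration):

from datetime import datetime, timezone

COCOA_EPOCH_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 (both UTC)

def cocoa_to_datetime(cocoa_seconds):
    # Shift an Apple (Core Data / HealthKit) timestamp onto the Unix epoch
    # and render it as an aware UTC datetime.
    return datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET, tz=timezone.utc)

print(cocoa_to_datetime(600000000))  # 2020-01-06 10:40:00+00:00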
Example No. 10
def get_sWipehist(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('history'):
            continue # Skip all other files
        
        data_list = []
        
        timestamp = wipe = promptwipe = reason = rebootreason = locale = updateorg = updatepkg = reqtime = ''
        with open(file_found, 'r') as f:
            for line in f:
                #print(line)
                if line.startswith('+'):
                    if '|' in line:
                        timestamp = line.split('|')
                        timestamp = timestamp[1].strip()
                        timestamp = timestamp.replace('/', '-')
                    else:
                        timestamp = line.split(':',1)
                        timestamp = timestamp[1].strip()
                        timestamp = timestamp.replace(']', '')
                        timestamp = timestamp.replace('/', '-')
                if line.startswith('--wipe_data'):
                    wipe = 'Yes'
                if line.startswith('--reason'):
                    reason = line.split('=')
                    reason = reason[1]
                if line.startswith('reboot_reason'):
                    rebootreason = line.split('=')
                    rebootreason = rebootreason[1]
                if line.startswith('reboot reason'):
                    rebootreason = line.split(':', 1)
                    rebootreason = rebootreason[1]
                if line.startswith('--locale'):
                    locale = line.split('=')
                    locale = locale[1]
                if line.startswith('--requested_time'):
                    reqtime = line.split('=')
                    reqtime = reqtime[1].replace('/', '-')
                if line.startswith('--update_org_package'):
                    updateorg = line.split('=')
                    updateorg = updateorg[1]
                if line.startswith('--update_package'):
                    updatepkg = line.split('=')
                    updatepkg = updatepkg[1]
                if line.startswith('--prompt_and_wipe_data'):
                    promptwipe = 'Yes'
                    wipe = 'Yes'
                if line.startswith('-\n'):
                    if wipe == 'Yes':
                        data_list.append((timestamp, wipe, promptwipe, reason, rebootreason, locale, reqtime, updateorg, updatepkg))
                    timestamp = wipe = promptwipe = reason = rebootreason = locale = updateorg = updatepkg = reqtime = ''
                            
                        
        if data_list:
            report = ArtifactHtmlReport('Samsung Wipe History')
            report.start_artifact_report(report_folder, 'Samsung Wipe History')
            report.add_script()
            data_headers = ('Timestamp', 'Wipe', 'Prompt & Wipe', 'Reason', 'Reboot Reason', 'Locale', 'Request Timestamp', 'Update ORG', 'Update PKG')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = 'Samsung Wipe History'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'Samsung Wipe History'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Samsung Wipe History data available')
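
Note: the wipe-history parser above walks a plain-text recovery log in which each record is a run of --key=value flag lines terminated by a lone '-'. A compact way to accumulate one such record is a dict keyed by flag name (a sketch only, not the project's actual helper; the sample record is made up):

def parse_recovery_block(lines):
    # Accumulate the --key=value flags of one recovery-log record into a dict.
    record = {}
    for line in lines:
        line = line.strip()
        if line.startswith('--') and '=' in line:
            key, _, value = line[2:].partition('=')
            record[key] = value
        elif line.startswith('--'):  # bare flag such as --wipe_data
            record[line[2:]] = True
    return record

sample = ['--wipe_data', '--reason=wipe_data_from_ota', '--locale=en-US', '-']
print(parse_recovery_block(sample))
# {'wipe_data': True, 'reason': 'wipe_data_from_ota', 'locale': 'en-US'}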
Example No. 11
def get_knowCwidget(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC Widgets Viewed " +
                iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_widgets_viewed.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUESTRING AS "BUNDLE ID",  
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/widgets/viewed" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Widgets Viewed')
        report.start_artifact_report(report_folder, 'Widgets Viewed',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Bundle ID', 'Usage in Seconds',
                        'Usage in Minutes', 'Day of Week', 'GMT Offset',
                        'Entry Creation', 'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Widgets Viewed'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Widgets Viewed'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for KnowledgeC Widgets Viewed')

    db.close()
    return
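
Note: the KnowledgeC artifacts in this collection differ mainly by the ZSTREAMNAME they filter on, so the shared boilerplate could be factored into one parameterized helper. A sketch under the same knowledgeC.db schema assumptions (the helper name is hypothetical, and stream_name is bound as a query parameter rather than interpolated into the SQL):

import sqlite3

def query_knowledgec_stream(db_path, stream_name):
    # Return (start, end, value string) rows for one ZSTREAMNAME, converting
    # Core Data timestamps (epoch 2001-01-01) to readable UTC strings.
    db = sqlite3.connect(db_path)
    try:
        cursor = db.cursor()
        cursor.execute('''
            SELECT
                DATETIME(ZOBJECT.ZSTARTDATE + 978307200, 'UNIXEPOCH'),
                DATETIME(ZOBJECT.ZENDDATE + 978307200, 'UNIXEPOCH'),
                ZOBJECT.ZVALUESTRING
            FROM ZOBJECT
            WHERE ZOBJECT.ZSTREAMNAME = ?
        ''', (stream_name,))
        return cursor.fetchall()
    finally:
        db.close()

# e.g. rows = query_knowledgec_stream(file_found, '/widgets/viewed')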
Example No. 12
def get_viber(files_found, report_folder, seeker):
	viber_settings = {}
	for file_found in files_found:
		file_found = str(file_found)

		iOSversion = scripts.artifacts.artGlobals.versionf
		if version.parse(iOSversion) < version.parse("14"):
			logfunc("Viber parsing has not be tested on this iOS " + iOSversion + " version. Please contact @theAtropos4n6 for resolving this issue.")

		if version.parse(iOSversion) >= version.parse("14"):
			if file_found.endswith('Settings.data'):
				db = open_sqlite_db_readonly(file_found)
				cursor = db.cursor()
				cursor.execute('''
					SELECT
						Data.key,value
					FROM Data
					WHERE Data.key IN 
						(
						'_myUserName',
						'_currentEmail',
						'_myPhoneNumber',
						'_myCanonizedPhoneNumber',
						'_myFormattedPhoneNumber',
						'_myCountryPhoneCode',
						'_myCountryCode',
						'_myLanguageCode',
						'_wasabiLastKnownUserLocation',
						'_uid',
						'_appVersion',
						'_savedDeviceId',
						'_attemptsToDownloadBackupForRestore',
						'_backupAttemptsCount',
						'_hiddenChatsPINData',
						'_myPhotoLocalID'
						)
					UNION
					SELECT
						Data.key,
						CASE 
						WHEN value LIKE '-%' THEN value
						ELSE datetime(value,'unixepoch')
						END
					FROM Data
					WHERE Data.key IN 
						('_registrationDate',
						'_autoBackupLastRunTime',
						'_lastBackupStartDate')
					UNION
					SELECT
						Data.key,
						datetime(value,'unixepoch') -- this value is stored in the user localtime
					FROM Data
					WHERE Data.key IS '_birthdate'
					ORDER BY value
				''')
				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows:
						viber_settings[row[0]] = row[1]
						temp_list = list(row)
						if temp_list[0] == '_appVersion':
							temp_list[0] = 'Application Version'
						elif temp_list[0] == '_lastBackupStartDate':
							temp_list[0] = 'Last Backup Start Date - UTC'
							if str(temp_list[1]).startswith('-'):
								temp_list[1] = 'Not Applied'
						elif temp_list[0] == '_myUserName':
							temp_list[0] = 'User Name'
						elif temp_list[0] == '_currentEmail':
							temp_list[0] = 'Current Email'
						elif temp_list[0] == '_birthdate':
							temp_list[0] = "Birth Date - UTC (apply user's localtime offset)"
						elif temp_list[0] == '_registrationDate':
							temp_list[0] = 'Registration Date - UTC'
						elif temp_list[0] == '_uid':
							temp_list[0] = 'User ID'
						elif temp_list[0] == '_myPhoneNumber':
							temp_list[0] = 'Phone Number'
						elif temp_list[0] == '_myCanonizedPhoneNumber':
							temp_list[0] = 'Canonized Phone Number'
						elif temp_list[0] == '_myFormattedPhoneNumber':
							temp_list[0] = 'Formatted Phone Number'
						elif temp_list[0] == '_myCountryPhoneCode':
							temp_list[0] = 'Country Phone Code'
						elif temp_list[0] == '_myCountryCode':
							temp_list[0] = 'Country Code'
						elif temp_list[0] == '_myLanguageCode':
							temp_list[0] = 'Language Code'
						elif temp_list[0] == '_wasabiLastKnownUserLocation':
							temp_list[0] = 'Last Known User Location'
						elif temp_list[0] == '_savedDeviceId':
							temp_list[0] = 'Device ID'
						elif temp_list[0] == '_myPhotoLocalID':
							temp_list[0] = 'Profile Picture'
							try:
								if temp_list[1] is not None:
									thumb = media_to_html(temp_list[1], files_found, report_folder)
									temp_list[1] = thumb
								else:
									thumb = ''								
							except:
								pass
						elif temp_list[0] == '_attemptsToDownloadBackupForRestore':
							temp_list[0] = 'Attempts To Download Backup For Restore'
							try:
								int.from_bytes(temp_list[1], byteorder='big') #needs further validation about the byteorder
							except Exception as err:
								logfunc(f'Viber - Settings "_attemptsToDownloadBackupForRestore" could not be extracted. The error was: {err}' )
						elif temp_list[0] == '_backupAttemptsCount':
							temp_list[0] = 'Backup Attempts Count'
							try:
								int.from_bytes(temp_list[1], byteorder='big') #needs further validation about the byteorder
							except Exception as err:
								logfunc(f'Viber - Settings "_backupAttemptsCount" could not be extracted. The error was: {err}' )
						elif temp_list[0] == '_autoBackupLastRunTime':
							temp_list[0] = 'Auto Backup Last Run Time - UTC'
							x = str(temp_list[1])
							if x.startswith("-"):
								temp_list[1] = 'Not Applied'
						elif temp_list[0] == '_hiddenChatsPINData':
							temp_list[0] = 'Hidden Chats PIN Data'
						row = tuple(temp_list)
						data_list.append((row[0], row[1]))
						

				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Settings')
					report.start_artifact_report(report_folder, 'Viber - Settings')
					report.add_script()
					data_headers = ('Setting','Value')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
					report.end_artifact_report()

					tsvname = 'Viber - Settings'
					tsv(report_folder, data_headers, data_list, tsvname)
		
				db.close()

			elif file_found.endswith('Contacts.data'):

				db = open_sqlite_db_readonly(file_found)
				cursor = db.cursor()

				cursor.execute('''
					SELECT
						ZABCONTACT.ZMAINNAME AS 'Main Name',
						ZABCONTACT.ZPREFIXNAME AS 'Prefix Name',
						ZABCONTACT.ZSUFFIXNAME AS 'Suffix Name',
						ZABCONTACTNUMBER.ZPHONE AS 'Phone Number',
						ZABCONTACTNUMBER.ZCANONIZEDPHONENUM AS 'Canonized Phone Number',
						ZABCONTACT.ZCONTACTID AS 'Contact ID'
						FROM ZABCONTACT
						LEFT JOIN ZABCONTACTNUMBER ON ZABCONTACT.Z_PK = ZABCONTACTNUMBER.ZCONTACT
						UNION
						SELECT
						ZABCONTACT.ZMAINNAME AS 'Main Name',
						ZABCONTACT.ZPREFIXNAME AS 'Prefix Name',
						ZABCONTACT.ZSUFFIXNAME AS 'Suffix Name',
						ZABCONTACTNUMBER.ZPHONE AS 'Phone Number',
						ZABCONTACTNUMBER.ZCANONIZEDPHONENUM AS 'Canonized Phone Number',
						ZABCONTACT.ZCONTACTID AS 'Contact ID'
					FROM ZABCONTACTNUMBER
					LEFT JOIN ZABCONTACT ON ZABCONTACT.Z_PK = ZABCONTACTNUMBER.ZCONTACT
					ORDER BY ZMAINNAME
				''')
				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows: 
						data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
						

				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Contacts')
					report.start_artifact_report(report_folder, 'Viber - Contacts')
					report.add_script()
					data_headers = ('Main Name','Prefix Name','Suffix Name','Phone Number','Canonized Phone Number','Contact ID')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
					report.end_artifact_report()


					tsvname = 'Viber - Contacts'
					tsv(report_folder, data_headers, data_list, tsvname)
					
				cursor.execute('''
					SELECT
						datetime(ZRECENT.ZDATE+ 978307200,'unixepoch') AS 'Timestamp - UTC',
						ZRECENT.ZRECENTSLINE AS 'EMPTY DUMMY COLUMN',
						CASE
								WHEN ZRECENT.ZCALLTYPE = 'missed' THEN 'Missed Audio Call'
								WHEN ZRECENT.ZCALLTYPE = 'missed_with_video' THEN 'Missed Video Call'
								WHEN ZRECENT.ZCALLTYPE = 'outgoing_viber' THEN 'Outgoing Audio Call'
								WHEN ZRECENT.ZCALLTYPE = 'outgoing_viber_with_video' THEN 'Outgoing Video Call'
								WHEN ZRECENT.ZCALLTYPE = 'incoming_with_video' THEN 'Incoming Video Call'
								WHEN ZRECENT.ZCALLTYPE = 'incoming' THEN 'Incoming Audio Call'
								ELSE ZRECENT.ZCALLTYPE
						end  AS 'Call Type',
						ZRECENT.ZDURATION AS 'Duration'
					FROM ZRECENT
					WHERE ZRECENT.ZCALLLOGMESSAGE IS NULL AND ZRECENT.ZRECENTSLINE IS NULL
				''')

				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows: 
						temp_list = list(row)
						try:
							if 'Outgoing' in temp_list[2]:
								temp_list[1] = str(viber_settings['_myUserName']) + ',' + str(viber_settings['_myPhoneNumber'])
						except TypeError:
							pass
						row = tuple(temp_list)
						data_list.append((row[0], row[1], row[2], row[3]))



				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Call Remnants')
					report.start_artifact_report(report_folder, 'Viber - Call Remnants')
					report.add_script()
					data_headers = ('Timestamp - UTC','Caller','Call Type','Duration')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
					report.end_artifact_report()


				cursor.execute('''
					SELECT 	
							CHAT_MEMBER.ZDISPLAYFULLNAME AS 'Sender (Display Full Name)',
							CHAT_MEMBER.ZDISPLAYSHORTNAME AS 'Sender (Display Short Name)',
							CHAT_MEMBER.ZPHONE AS 'Sender (Phone)',
							CHATS.Chat_Name AS 'Chat Name',
							CHATS.CHAT_MEMBERS AS 'Chat Participant(s)',
							CHATS.CHAT_PHONES 'Chat Phone(s)',
							datetime(ZVIBERMESSAGE.ZSTATEDATE+ 978307200,'unixepoch') AS 'Message Creation Date - UTC',
							datetime(ZVIBERMESSAGE.ZDATE+ 978307200,'unixepoch') AS 'Message Change State Date - UTC',
							datetime(RECENT.ZRECENTDATE+ 978307200,'unixepoch') AS 'Call Date - UTC',
							CASE
								WHEN ZCALLTYPE = 'missed' THEN 'Missed Audio Call'
								WHEN ZCALLTYPE = 'missed_with_video' THEN 'Missed Video Call'
								WHEN ZCALLTYPE = 'outgoing_viber' THEN 'Outgoing Audio Call'
								WHEN ZCALLTYPE = 'outgoing_viber_with_video' THEN 'Outgoing Video Call'
								WHEN ZCALLTYPE = 'incoming_with_video' THEN 'Incoming Video Call'
								WHEN ZCALLTYPE = 'incoming' THEN 'Incoming Audio Call'
								ELSE ZCALLTYPE
							end  AS 'Call Type',
							CASE
								WHEN ZVIBERMESSAGE.ZSTATE IN ('send','delivered') THEN 'Outgoing'
								WHEN ZVIBERMESSAGE.ZSTATE = 'received' THEN 'Incoming'
								ELSE ZVIBERMESSAGE.ZSTATE
							END AS 'State',
							RECENT.ZDURATION AS 'Duration',
							ZVIBERMESSAGE.ZSYSTEMTYPE 'System Type Description',
							ZVIBERMESSAGE.ZMETADATA AS 'Message Metadata',
							ZVIBERMESSAGE.ZTEXT AS 'Message Content',
							ZATTACHMENT.ZNAME AS 'Attachment Name',
							ZATTACHMENT.ZTYPE AS 'Attachment Type',
							ZATTACHMENT.ZFILESIZE AS 'Attachment Size',
							ZVIBERLOCATION.ZLATITUDE AS 'Latitude',
							ZVIBERLOCATION.ZLONGITUDE AS 'Longitude',
							CASE
								WHEN CHATS.Chat_Deleted = 1 THEN 'True'
								WHEN CHATS.Chat_Deleted = 0 THEN 'False'
								ELSE CHATS.Chat_Deleted
							END AS 'Conversation Deleted',
							CASE
								WHEN ZVIBERMESSAGE.ZBEINGDELETED = 1 THEN 'True'
								WHEN ZVIBERMESSAGE.ZBEINGDELETED = 0 THEN 'False'
								ELSE ZVIBERMESSAGE.ZBEINGDELETED
							END AS 'Message Deleted',
							CHATS.ZTIMEBOMBDURATION AS 'Conversation Time Bomb Duration',
							ZVIBERMESSAGE.ZTIMEBOMBDURATION AS 'Message Time Bomb Duration',
							datetime(ZVIBERMESSAGE.ZTIMEBOMBTIMESTAMP+ 978307200,'unixepoch') AS 'Message Time Bomb Timestamp',
							CASE 
								WHEN CHATS.Chat_Favorite= 1 THEN 'True'
								WHEN CHATS.Chat_Favorite = 0 THEN 'False'
								ELSE CHATS.Chat_Favorite
							END AS 'Conversation Marked Favorite',
							ZVIBERMESSAGE.ZLIKESCOUNT AS 'Likes Count'
					FROM
						ZVIBERMESSAGE
					LEFT JOIN
							(SELECT
								ZVIBERMESSAGE.ZCONVERSATION,
								ZCONVERSATION.ZNAME AS 'Chat_Name',
								ZCONVERSATION.ZBEINGDELETED AS 'Chat_Deleted',
								ZCONVERSATION.ZFAVORITE AS 'Chat_Favorite',
								ZCONVERSATION.ZTIMEBOMBDURATION,
								coalesce(ZVIBERMESSAGE.ZPHONENUMINDEX,ZCONVERSATION.ZINTERLOCUTOR) AS 'MEMBER_ID',
								MEMBER.ZDISPLAYFULLNAME,
								MEMBER.ZDISPLAYSHORTNAME,
								MEMBER.ZNAME AS 'Participant_Name',
								MEMBER.ZCANONIZEDPHONENUM,
								MEMBER.ZPHONE,
								group_concat(DISTINCT(MEMBER.ZDISPLAYFULLNAME)) AS 'CHAT_MEMBERS',
								group_concat(DISTINCT(MEMBER.ZPHONE)) AS 'CHAT_PHONES',
								group_concat(DISTINCT(MEMBER.ZCANONIZEDPHONENUM)) AS 'CHAT_CANONIZED_PHONES'
							FROM ZVIBERMESSAGE,ZCONVERSATION
								LEFT JOIN
										(SELECT 
													ZMEMBER.ZDISPLAYFULLNAME,
													ZMEMBER.ZDISPLAYSHORTNAME,
													ZMEMBER.ZNAME,
													ZPHONENUMBER.ZCANONIZEDPHONENUM,
													ZPHONENUMBER.ZPHONE,
													ZMEMBER.Z_PK
											FROM ZMEMBER
											LEFT JOIN ZPHONENUMBER ON ZMEMBER.Z_PK = ZPHONENUMBER.ZMEMBER
											UNION
											SELECT 
													ZMEMBER.ZDISPLAYFULLNAME,
													ZMEMBER.ZDISPLAYSHORTNAME,
													ZMEMBER.ZNAME,
													ZPHONENUMBER.ZCANONIZEDPHONENUM,
													ZPHONENUMBER.ZPHONE,
													ZMEMBER.Z_PK
											FROM ZPHONENUMBER
											LEFT JOIN ZMEMBER ON ZPHONENUMBER.ZMEMBER  = ZMEMBER.Z_PK
										) AS MEMBER ON MEMBER.Z_PK = MEMBER_ID
								LEFT JOIN ZPHONENUMBER ON MEMBER_ID = ZPHONENUMBER.ZMEMBER
							WHERE ZVIBERMESSAGE.ZCONVERSATION = ZCONVERSATION.Z_PK
							GROUP BY ZVIBERMESSAGE.ZCONVERSATION
					) CHATS ON ZVIBERMESSAGE.ZCONVERSATION = CHATS.ZCONVERSATION

					LEFT JOIN
							(SELECT 
										ZMEMBER.ZDISPLAYFULLNAME,
										ZMEMBER.ZDISPLAYSHORTNAME,
										ZMEMBER.ZNAME,
										ZPHONENUMBER.ZCANONIZEDPHONENUM,
										ZPHONENUMBER.ZPHONE,
										ZMEMBER.Z_PK
								FROM ZMEMBER
								LEFT JOIN ZPHONENUMBER ON ZMEMBER.Z_PK = ZPHONENUMBER.ZMEMBER
								UNION
								SELECT 
										ZMEMBER.ZDISPLAYFULLNAME,
										ZMEMBER.ZDISPLAYSHORTNAME,
										ZMEMBER.ZNAME,
										ZPHONENUMBER.ZCANONIZEDPHONENUM,
										ZPHONENUMBER.ZPHONE,
										ZMEMBER.Z_PK
								FROM ZPHONENUMBER
								LEFT JOIN ZMEMBER ON ZPHONENUMBER.ZMEMBER  = ZMEMBER.Z_PK
							) AS CHAT_MEMBER ON ZVIBERMESSAGE.ZPHONENUMINDEX = CHAT_MEMBER.Z_PK
					LEFT JOIN
							(SELECT
												ZRECENT.ZDURATION,
												ZRECENT.ZCALLLOGMESSAGE,
												ZRECENT.ZDATE AS 'ZRECENTDATE',
												ZRECENTSLINE.ZDATE AS 'ZRECENTSLINEDATE',
												ZRECENT.ZCALLTYPE AS 'CALL TYPE',
												ZRECENTSLINE.ZCALLTYPE AS 'RECENTSLINE CALL TYPE',
												ZRECENTSLINE.ZPHONENUMBER AS 'PHONE NUMBER'
										FROM ZRECENT
										LEFT JOIN ZRECENTSLINE ON ZRECENT.ZRECENTSLINE = ZRECENTSLINE.Z_PK
							) AS RECENT ON ZVIBERMESSAGE.Z_PK = RECENT.ZCALLLOGMESSAGE 
					LEFT JOIN ZVIBERLOCATION ON ZVIBERMESSAGE.ZLOCATION = ZVIBERLOCATION.Z_PK
					LEFT JOIN ZATTACHMENT ON ZVIBERMESSAGE.ZATTACHMENT = ZATTACHMENT.Z_PK

					ORDER BY ZVIBERMESSAGE.Z_PK
								''')
					
				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows:
						temp_list = list(row)
						temp_chats_names = str(temp_list[4])
						temp_list[4] = temp_chats_names + ',' + str(viber_settings['_myUserName'])
						temp_chats_phones = str(temp_list[5])
						temp_list[5] = temp_chats_phones + ',' + str(viber_settings['_myPhoneNumber'])
						if temp_list[13]:
							y = json.loads(temp_list[13], strict=False) # the key that stores geolocation data is ['pa_message_data']['rich_media']['Buttons'][2]['Map']
							#if the 'Map' key is identified successfully it will assign lat,lon to the corresponding columns, otherwise it will continue on (passing any key,index errors)
							temp_list[13] = ''
							try:
								temp_list[18] = y['pa_message_data']['rich_media']['Buttons'][2]['Map']['Latitude']
								temp_list[19] = y['pa_message_data']['rich_media']['Buttons'][2]['Map']['Longitude']
							except (KeyError,IndexError) as e:
								pass

							#This long chain of dict-key lookups extracts only specific fields identified as important from the whole metadata dictionary.
							#Only a subset is extracted so the report stays readable; for the complete picture, inspect the full dictionary in the .db itself.
							#That is why this column is named 'Message Metadata Fragments'.

							#general values
							if "Text" in y:
								try:
									temp_list[13] += "Text: "+ str(y['Text'])+","
								except KeyError:
									pass
							if "Title" in y:
								try:
									temp_list[13] += "Title: "+ str(y['Title'])+","
								except KeyError:
									pass
							if "URL" in y:
								try:
									temp_list[13] += "URL: "+ str(y['URL'])+","
								except KeyError:
									pass
							if "ThumbnailURL" in y:
								try:
									temp_list[13] += "ThumbnailURL: "+ str(y['ThumbnailURL'])+","
								except KeyError:
									pass
							if "Type" in y:
								try:
									temp_list[13] += "Type: "+ str(y['Type'])+","
								except KeyError:
									pass
						
							if "generalFwdInfo" in y:
								try:
									temp_list[13] += "Original Chat ID: "+ str(y['generalFwdInfo']['orig_chat_id'])+","
								except KeyError:
									pass

							if "audio_ptt" in y:
								try:
									temp_list[13] += "Audio Duration: "+ str(y['audio_ptt']['duration'])+","
								except KeyError:
									pass

							#fileInfo values
							if "fileInfo" in y:
								try:
									temp_list[13] += "File Info - Content Type: "+ str(y['fileInfo']['ContentType'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Type: "+ str(y['fileInfo']['Type'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Hash: "+ str(y['fileInfo']['FileHash'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Name: "+ str(y['fileInfo']['FileName'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Extension: "+ str(y['fileInfo']['FileExt'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Duration: "+ str(y['fileInfo']['Duration'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Size: "+ str(y['fileInfo']['FileSize'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Original Size: "+ str(y['fileInfo']['OrigSize'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - iOS Origin: "+ str(y['fileInfo']['mediaInfo']['ios_origin'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - Width: "+ str(y['fileInfo']['mediaInfo']['Width'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - Height: "+ str(y['fileInfo']['mediaInfo']['Height'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - Media Type: "+ str(y['fileInfo']['mediaInfo']['MediaType'])+","
								except KeyError:
									pass

							#custom_sticker_info values
							if "custom_sticker_info" in y:
								try:
									temp_list[13] += "Custom Sticker Info - Package ID: "+ str(y['custom_sticker_info']['package_id'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Custom Sticker Info - Sticker ID: "+ str(y['custom_sticker_info']['sticker_id'])+","
								except KeyError:
									pass

							#groupReferralInfo values
							if "groupReferralInfo" in y:
								try:
									temp_list[13] += "Group ID: "+ str(y['groupReferralInfo']['groupID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Group Name: "+ str(y['groupReferralInfo']['groupName'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Invite Link: "+ str(y['groupReferralInfo']['inviteLink'])+","
								except KeyError:
									pass

							#pa_message_data values
							if "pa_message_data" in y:
								try:
									temp_list[13] += "Message Data - Text: "+ str(y['pa_message_data']['text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Message Data - Sender Name: "+ str(y['pa_message_data']['sender']['name'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Message Data - Alt Text: "+ str(y['pa_message_data']['alt_text'])+"," 
								except KeyError:
									pass
								try:
									temp_list[13] += "Message Data - Favorites Metadata - URL: "+ str(y['pa_message_data']['rich_media']['FavoritesMetadata']['url'])+","
								except KeyError:
									pass

							#pin values
							if "pin" in y:
								try:
									temp_list[13] += "Pin - Action: "+ str(y['pin']['action'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Pin - Text: "+ str(y['pin']['text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Pin - Description: "+ str(y['pin']['extended']['descr'])+","
								except KeyError:
									pass

							#poll values
							if "poll" in y:
								try:
									temp_list[13] += "Poll - Group ID: "+ str(y['poll']['groupID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Type: "+ str(y['poll']['type'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Sender ID: "+ str(y['poll']['senderID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Multiple: "+ str(y['poll']['multiple'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Quiz Text: "+ str(y['poll']['quiz_text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Description: "+ str(y['poll']['extended']['descr'])+","
								except KeyError:
									pass
								try:
									if y['poll']['options']:
										z = ''
										for x in y['poll']['options']:
											try:
												z = x['count']
												temp_list[13] += "Poll - Options - Count: "+ str(z)+","
											except (KeyError,IndexError) as e:
												pass
											try:
												z = x['name']
												temp_list[13] += "Poll - Options - Name: "+ str(z)+","
											except (KeyError,IndexError) as e:
												pass
											try:
												z = x['isLiked']
												temp_list[13] += "Poll - Options - Is Liked: "+ str(z)+","
											except (KeyError,IndexError) as e:
												pass
								except (KeyError,IndexError) as e:
									pass

							#quote values
							if "quote" in y:
								try:
									temp_list[13] += "Quote - Text: "+ str(y['quote']['text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Name: "+ str(y['quote']['name'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Attachment Name: "+ str(y['quote']['attachmentName'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Attachment UID: "+ str(y['quote']['attachmentUID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Attachment Preview Path: "+ str(y['quote']['attachmentPreviewPath'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Text Meta Info - Data: "+ y['quote']['textMetaInfo_v2'][0]['data']+","
								except (KeyError,IndexError) as e:
									pass
		
						if temp_list[10] == 'Outgoing':
							temp_list[0] = viber_settings['_myUserName']
							temp_list[1] = ''
							temp_list[2] = viber_settings['_myPhoneNumber']
						
						if row[15] is not None:
							thumb = media_to_html(row[15], files_found, report_folder)
						else:
							thumb = ''
						
						row = tuple(temp_list)
						data_list.append((row[6], row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[14], row[15], thumb, row[8], row[9], row[10], row[11], row[12], row[16], row[17], row[18],row[19], row[20], row[21], row[22], row[23], row[24], row[25], row[26], row[13]))
						

				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Chats')
					report.start_artifact_report(report_folder, 'Viber - Chats')
					report.add_script()
					data_headers = ('Timestamp', 'Sender (Display Full Name)','Sender (Display Short Name)','Sender (Phone)','Chat Name','Chat Participant(s)','Chat Phone(s)', 'Message Creation Date - UTC','Message Change State Date - UTC','Message Content','Attachment Name', 'Attachment','Call Date - UTC','Call Type','State','Duration (Seconds)','System Type Description','Attachment Type','Attachment Size','Latitude','Longitude','Conversation Deleted','Message Deleted', 'Conversation Time Bomb Duration','Message Time Bomb Duration','Message Time Bomb Timestamp - UTC','Conversation Marked Favorite','Likes Count','Message Metadata Fragments')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Attachment']) #html_escape=False
					report.end_artifact_report()
					
					kmlactivity = 'Viber - Chats'
					kmlgen(report_folder, kmlactivity, data_list, data_headers)

					tsvname = 'Viber - Chats'
					tsv(report_folder, data_headers, data_list, tsvname)
				
				db.close()
			else:
				logfunc('No Viber data found.')
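
Note: most of the try/except KeyError ladder above does one thing repeatedly: read an optional path of nested keys out of the ZMETADATA JSON. A small helper makes that pattern explicit (a sketch, not part of the original module; the sample metadata dict is made up):

def dig(obj, *path, default=None):
    # Walk nested dicts/lists by key or index, returning default on any miss.
    for step in path:
        try:
            obj = obj[step]
        except (KeyError, IndexError, TypeError):
            return default
    return obj

# Example against the geolocation path used above:
meta = {'pa_message_data': {'rich_media': {'Buttons': [{}, {}, {'Map': {'Latitude': 37.9}}]}}}
print(dig(meta, 'pa_message_data', 'rich_media', 'Buttons', 2, 'Map', 'Latitude'))  # 37.9
print(dig(meta, 'fileInfo', 'FileName', default=''))                                # ''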
Example No. 13
def get_filesAppsdb(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('server.db'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT * 
    FROM
    DEVICES
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:

        for row in all_rows:
            data_list.append((row[1], ))

        description = 'Device names that are able to sync to iCloud Drive.'
        report = ArtifactHtmlReport('Files App - iCloud Sync Names')
        report.start_artifact_report(report_folder,
                                     'Files App - iCloud Sync Names',
                                     description)
        report.add_script()
        data_headers = ('Name', )
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Files App - iCloud Sync Names'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Files App - iCloud Sync Names data available')

    cursor.execute('''
    SELECT
    item_birthtime,
    item_filename,
    version_mtime
    FROM
    server_items
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            birthtime = datetime.datetime.fromtimestamp(row[0])
            versionmtime = datetime.datetime.fromtimestamp(row[2])
            data_list.append((birthtime, row[1], versionmtime))

        description = ''
        report = ArtifactHtmlReport('Files App - iCloud Server Items')
        report.start_artifact_report(report_folder,
                                     'Files App - iCloud Server Items',
                                     description)
        report.add_script()
        data_headers = ('Birthtime', 'Filename', 'Version Modified Time')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Files App - iCloud Server Items'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Files App - iCloud Server Items'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Files App - iCloud Server Items data available')
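
One caveat on the server_items conversion above: datetime.datetime.fromtimestamp() renders a Unix timestamp in the examiner machine's local timezone, so the same database can report different times on different workstations. An aware UTC conversion is the timezone-stable pattern (a sketch; the helper name is hypothetical):

import datetime

def unix_to_utc_string(ts):
    # Render a Unix timestamp as a UTC string, independent of the local machine.
    return datetime.datetime.fromtimestamp(
        ts, tz=datetime.timezone.utc).strftime('%Y-%m-%d %H:%M:%S')

print(unix_to_utc_string(1609459200))  # 2021-01-01 00:00:00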
Example No. 14
def get_knowClocation(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC Location on iOS " +
                iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
      	DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LATITUDE || ", " || ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LONGITUDE AS "COORDINATES",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LOCATIONNAME AS "NAME",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__DISPLAYNAME AS "DISPLAY NAME",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__FULLYFORMATTEDADDRESS AS "FORMATTED ADDRESS",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__CITY AS "CITY",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__STATEORPROVINCE AS "STATE/PROVINCE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__COUNTRY AS "COUNTRY",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__POSTALCODE_V2 AS "POSTAL CODE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__SUBTHOROUGHFARE AS "SUBTHOROUGHFARE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__THOROUGHFARE AS "THOROUGHFARE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__PHONENUMBERS AS "PHONE NUMBERS",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__URL AS "URL",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__CONTENTDESCRIPTION AS "CONTENT DESCRIPTION",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYREQUIREDSTRING AS "USER ACTIVITY REQUIRED STRING",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LATITUDE AS "LATITUDE",
		ZSTRUCTUREDMETADATA.Z_DKLOCATIONAPPLICATIONACTIVITYMETADATAKEY__LONGITUDE AS "LONGITUDE",
		ZSOURCE.ZSOURCEID AS "SOURCE ID",
		ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYUUID AS "ACTIVITY UUID",
		ZSOURCE.ZITEMID AS "ITEM ID",
		ZSOURCE.ZSOURCEID AS "SOURCE ID",
      CASE ZOBJECT.ZSTARTDAYOFWEEK 
         WHEN "1" THEN "Sunday"
         WHEN "2" THEN "Monday"
         WHEN "3" THEN "Tuesday"
         WHEN "4" THEN "Wednesday"
         WHEN "5" THEN "Thursday"
         WHEN "6" THEN "Friday"
         WHEN "7" THEN "Saturday"
      END "DAY OF WEEK",
      ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
      DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
      ZOBJECT.ZUUID AS "UUID",
      ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
   FROM
      ZOBJECT 
      LEFT JOIN
         ZSTRUCTUREDMETADATA 
         ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
      LEFT JOIN
         ZSOURCE 
         ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
   WHERE
      ZSTREAMNAME = "/app/locationActivity" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13],
                 row[14], row[15], row[16], row[17], row[18], row[19], row[20],
                 row[21], row[22], row[23], row[24], row[25], row[26], row[27],
                 row[28], row[29], row[30]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Location Activity')
        report.start_artifact_report(report_folder, 'Location Activity',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Bundle ID', 'Coordinates', 'Name',
                        'Display Name', 'Formatted Address', 'City',
                        'State/Province', 'Country', 'Postal Code',
                        'Subthoroughfare', 'Thoroughfare', 'Phone Numbers',
                        'URL', 'Activity Type', 'Content Description',
                        'User Activity Required String', 'Content URL',
                        'Unique ID', 'Latitude', 'Longitude', 'Source ID',
                        'Activity UUID', 'Item ID', 'Source ID',
                        'Day of the Week', 'GMT Offset', 'Entry Creation',
                        'UUID', 'ZObject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Location Activity'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Location Activity'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for KnowledgeC Location Activity')
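
Note: the COORDINATES column above is built with SQLite's || string-concatenation operator, which joins latitude and longitude into one display string while the separate numeric LATITUDE/LONGITUDE columns remain available downstream. The operator in isolation (a standalone illustration with made-up coordinates):

import sqlite3

db = sqlite3.connect(':memory:')
# SQLite's || operator, as used for the COORDINATES column above:
row = db.execute("SELECT 37.7749 || ', ' || -122.4194").fetchone()
print(row[0])  # 37.7749, -122.4194
db.close()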
Example No. 15
def get_ooklaSpeedtestData(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('speedtest.sqlite'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime(("ZDATE")+strftime('%s', '2001-01-01 00:00:00'), 'unixepoch') as 'Date',
        "ZEXTERNALIP" as 'External IP Address',
        "ZINTERNALIP" as 'Internal IP Address',
        "ZCARRIERNAME" as 'Carrier Name',
        "ZISP" as 'ISP',
        "ZWIFISSID" as 'Wifi SSID',
        "ZWANTYPE" as 'WAN Type',
        CASE "ZDEVICEMODEL" 
            WHEN "iPad3,1"
                THEN "iPad 3rd Gen (Wi-Fi Only)"
            WHEN "iPad3,2"
                THEN "iPad 3rd Gen (Wi-Fi/Cellular Verizon/GPS)"
            WHEN "iPad3,3"
                THEN "iPad 3rd Gen (Wi-Fi/Cellular AT&T/GPS)"
            WHEN "iPad3,4"
                THEN "iPad 4th Gen (Wi-Fi Only)"
            WHEN "iPad3,5"
                THEN "iPad 4th Gen (Wi-Fi/AT&T/GPS)"
            WHEN "iPad3,6"
                THEN "iPad 4th Gen (Wi-Fi/Verizon & Sprint/GPS)"
            WHEN "iPad6,11"
                THEN "iPad 9.7 5th Gen (Wi-Fi Only)"
            WHEN "iPad6,12"
                THEN "iPad 9.7 5th Gen (Wi-Fi/Cellular)"
            WHEN "iPad7,5"
                THEN "iPad 9.7 6th Gen (Wi-Fi Only)"
            WHEN "iPad7,6"
                THEN "iPad 9.7 6th Gen (Wi-Fi/Cellular)"
            WHEN "iPad7,11"
                THEN "iPad 10.2 7th Gen (Wi-Fi Only)"
            WHEN "iPad7,12"
                THEN "iPad 10.2 7th Gen (Wi-Fi/Cellular Global)"
            WHEN "iPad11,3"
                THEN "iPad Air 3rd Gen (Wi-Fi Only)"
            WHEN "iPad11,4"
                THEN "iPad Air 3rd Gen (Wi-Fi+Cell)"
            WHEN "iPad2,5"
                THEN "iPad mini Wi-Fi Only/1st Gen"
            WHEN "iPad2,6"
                THEN "iPad mini Wi-Fi/AT&T/GPS - 1st Gen"
            WHEN "iPad2,7"
                THEN "iPad mini Wi-Fi/VZ & Sprint/GPS - 1st Gen"
            WHEN "iPad4,4"
                THEN "iPad mini 2 (Retina/2nd Gen Wi-Fi Only)"
            WHEN "iPad4,5"
                THEN "iPad mini 2 (Retina/2nd Gen Wi-Fi/Cellular)"
            WHEN "iPad4,7"
                THEN "iPad mini 3 (Wi-Fi Only)"
            WHEN "iPad4,8"
                THEN "iPad mini 3 (Wi-Fi/Cellular)"
            WHEN "iPad5,1"
                THEN "iPad mini 4 (Wi-Fi Only)"
            WHEN "iPad5,2"
                THEN "iPad mini 4 (Wi-Fi/Cellular)"
            WHEN "iPad11,1"
                THEN "iPad mini 5th Gen (Wi-Fi Only)"
            WHEN "iPad11,2"
                THEN "iPad mini 5th Gen (Wi-Fi+Cell)"
            WHEN "iPad6,7" 
                THEN "iPad Pro 12.9 (Wi-Fi Only)"
            WHEN "iPad6,8" 
                THEN "iPad Pro 12.9 (Wi-Fi/Cellular)"
            WHEN "iPad6,3" 
                THEN "iPad Pro 9.7 (Wi-Fi Only)"
            WHEN "iPad6,4" 
                THEN "iPad Pro 9.7 (Wi-Fi/Cellular)"
            WHEN "iPad7,3" 
                THEN "iPad Pro 10.5 (Wi-Fi Only)"
            WHEN "iPad7,4" 
                THEN "iPad Pro 10.5 (Wi-Fi/Cellular)"
            WHEN "iPad7,1" 
                THEN "iPad Pro 12.9 (Wi-Fi Only - 2nd Gen)"
            WHEN "iPad7,2" 
                THEN "iPad Pro 12.9 (Wi-Fi/Cell - 2nd Gen)"
            WHEN "iPad8,9" 
                THEN "iPad Pro 11 (Wi-Fi Only - 2nd Gen)"
            WHEN "iPad8,10" 
                THEN "iPad Pro 11 (Wi-Fi/Cell - 2nd Gen)"
            WHEN "iPad8,11" 
                THEN "iPad Pro 12.9 (Wi-Fi Only - 4th Gen)"
            WHEN "iPad8,12" 
                THEN "iPad Pro 12.9 (Wi-Fi/Cell - 4th Gen)"
            WHEN "iPhone8,4" 
                THEN "iPhone SE (United States/A1662)"
            WHEN "iPhone9,1" 
                THEN "iPhone 7 (Verizon/Sprint/China/A1660)"
            WHEN "iPhone9,3" 
                THEN "iPhone 7 (AT&T/T-Mobile/A1778)"
            WHEN "iPhone9,2" 
                THEN "iPhone 7 Plus (Verizon/Sprint/China/A1661)"
            WHEN "iPhone9,4" 
                THEN "iPhone 7 Plus (AT&T/T-Mobile/A1784)"
            WHEN "iPhone10,1" 
                THEN "iPhone 8 (Verizon/Sprint/China/A1863)"
            WHEN "iPhone10,4" 
                THEN "iPhone 8 (AT&T/T-Mobile/Global/A1905)"
            WHEN "iPhone10,2" 
                THEN "iPhone 8 Plus (Verizon/Sprint/China/A1864)"
            WHEN "iPhone10,5" 
                THEN "iPhone 8 Plus (AT&T/T-Mobile/Global/A1897)"
            WHEN "iPhone10,3" 
                THEN "iPhone X (Verizon/Sprint/China/A1865)"
            WHEN "iPhone10,6" 
                THEN "iPhone X (AT&T/T-Mobile/Global/A1901)"
            WHEN "iPhone11,2" 
                THEN "iPhone Xs (A1920/A2097/A2098/A2100)"
            WHEN "iPhone11,6" 
                THEN "iPhone Xs Max (A1921/A2101/A2101/A2104)"
            WHEN "iPhone11,8" 
                THEN "iPhone XR (A1984/A2105/A2106/A2108)"
            WHEN "iPhone12,1" 
                THEN "iPhone 11 (A2111/A2221/A2223)"
            WHEN "iPhone12,3" 
                THEN "iPhone 11 Pro (A2160/A2215/A2217)"
            WHEN "iPhone12,5" 
                THEN "iPhone 11 Pro Max (A2161/A2218/A2220)"
                    ELSE "ZDEVICEMODEL"
            END 'Device Model',
        "ZLATITUDE" as 'Latitude',
        "ZLONGITUDE" as 'Longitude',
        "ZHORIZONTALACCURACY" as 'Accuracy in Meters'
        FROM ZSPEEDTESTRESULT
        
        ORDER BY "ZDATE" DESC	
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10]))

        description = ''
        report = ArtifactHtmlReport('Applications')
        report.start_artifact_report(report_folder, 'Ookla Speedtest',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'External IP Address',
                        'Internal IP Address', 'Carrier Name', 'ISP',
                        'Wifi SSID', 'WAN Type', 'Device Model',
                        'Latitude', 'Longitude', 'Accuracy in Meters')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Ookla Speedtest Data'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Ookla Speedtest Data'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Ookla Speedtest Data'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No Ookla Speedtest Application data available')

    db.close()
    return
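
Note: the long CASE expression mapping ZDEVICEMODEL identifiers to marketing names could equally live in Python as a lookup dict, which keeps the SQL short and the mapping reusable. A sketch with a few entries taken from the list above (the full table would be filled in the same way):

DEVICE_MODELS = {
    'iPhone11,8': 'iPhone XR (A1984/A2105/A2106/A2108)',
    'iPhone12,1': 'iPhone 11 (A2111/A2221/A2223)',
    'iPad11,1': 'iPad mini 5th Gen (Wi-Fi Only)',
}

def model_name(identifier):
    # Fall back to the raw identifier, mirroring the CASE ... ELSE branch.
    return DEVICE_MODELS.get(identifier, identifier)

print(model_name('iPhone12,1'))  # iPhone 11 (A2111/A2221/A2223)
print(model_name('iPhone99,9'))  # iPhone99,9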
Example No. 16
def get_chromeCookies(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found) == 'Cookies':  # skip -journal and other files
            continue
        browser_name = get_browser_name(file_found)
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        CASE
            last_access_utc 
            WHEN
                "0" 
            THEN
                "" 
            ELSE
                datetime(last_access_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        END AS "last_access_utc", 
        host_key,
        name,
        value,
        CASE
            creation_utc 
            WHEN
                "0" 
            THEN
                "" 
            ELSE
                datetime(creation_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        END AS "creation_utc", 
        CASE
            expires_utc 
            WHEN
                "0" 
            THEN
                "" 
            ELSE
                datetime(expires_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        END AS "expires_utc", 
        path
        FROM
        cookies
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} Cookies')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder,
                                       f'{browser_name} Cookies.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Last Access Date', 'Host', 'Name', 'Value',
                            'Created Date', 'Expiration Date', 'Path')
            data_list = []
            for row in all_rows:
                if wrap_text:
                    data_list.append(
                        (row[0], row[1], (textwrap.fill(row[2], width=50)),
                         row[3], row[4], row[5], row[6]))
                else:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} cookies'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} Cookies'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} cookies data available')

        db.close()
Exemplo n.º 17
0
def get_mobileActivationLogs(files_found, report_folder, seeker):
    data_list = []
    data_list_info = []

    source_files = []
    for file_found in files_found:
        file_found = str(file_found)
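        # Strip the Windows extended-length path prefix (\\?\) when present
        # so the source path recorded in the report stays readable.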
        if file_found.startswith('\\\\?\\'):
            file_name = pathlib.Path(file_found[4:]).name
            source_files.append(file_found[4:])
        else:
            file_name = pathlib.Path(file_found).name
            source_files.append(file_found)

        with open(file_found, 'r') as fp:
            data = fp.readlines()
            linecount = 0
            hitcount = 0
            activationcount = 0

            # The pattern captures the syslog-style timestamp (month, day,
            # time, year) plus the trailing message text; compile it once per
            # file since it never changes between lines.
            date_filter = re.compile(
                r'(([A-Za-z]+[\s]+([a-zA-Z]+[\s]+[0-9]+)[\s]+([0-9]+\:[0-9]+\:[0-9]+)[\s]+([0-9]{4}))([\s]+[\[\d\]]+[\s]+[\<a-z\>]+[\s]+[\(\w\)]+[\s]+[A-Z]{2}\:[\s]+)([main\:\s]*.*)$)'
            )

            for line in data:
                linecount += 1
                line_match = date_filter.match(line)

                if line_match:
                    date_time = (line_match.group(3, 5, 4))
                    conv_time = ' '.join(date_time)
                    dtime_obj = datetime.strptime(conv_time,
                                                  '%b %d %Y %H:%M:%S')
                    ma_datetime = str(dtime_obj)
                    values = line_match.group(7)

                    if 'perform_data_migration' in values:
                        hitcount += 1
                        upgrade_match = re.search((
                            r'((.*)(Upgrade\s+from\s+[\w]+\s+to\s+[\w]+\s+detected\.$))'
                        ), values)
                        if upgrade_match:
                            upgrade = upgrade_match.group(3)
                            data_list.append((ma_datetime, upgrade, file_name))

                    if '____________________ Mobile Activation Startup _____________________' in values:
                        activationcount += 1
                        ma_startup_line = str(linecount)
                        ma_startup = (
                            f'Mobile Activation Startup at line: {ma_startup_line}'
                        )
                        data_list.append((ma_datetime, ma_startup, file_name))

            upgrade_entries = (
                f'Found {hitcount} Upgrade entries in {file_name}')
            boot_entries = (
                f'Found {activationcount} Mobile Activation entries in {file_name}'
            )
            data_list_info.append((boot_entries, upgrade_entries))

    report = ArtifactHtmlReport('Mobile Activation Logs')
    report.start_artifact_report(report_folder, 'Mobile Activation Logs')
    report.add_script()
    data_headers_info = ('Mobile Activation', 'Upgrade')
    data_headers = ('Datetime', 'Event', 'Log Name')

    source_files_found = ', '.join(source_files)

    report.write_artifact_data_table(data_headers_info,
                                     data_list_info,
                                     source_files_found,
                                     cols_repeated_at_bottom=False)
    report.write_artifact_data_table(data_headers, data_list,
                                     source_files_found, True, False)
    report.end_artifact_report()

    tsvname = 'Mobile Activation Logs'
    tsv(report_folder, data_headers, data_list, tsvname)
    tsv(report_folder, data_headers_info, data_list_info, tsvname)
Exemplo n.º 18
0
def process_siminfo(folder, uid, report_folder):

    #Open the database read-only
    db = open_sqlite_db_readonly(folder)
    cursor = db.cursor()

    #Query to create report
    try:
        cursor.execute('''
        SELECT
            number,
            imsi,
            display_name,
            carrier_name,
            iso_country_code,
            carrier_id,
            icc_id
        FROM
            siminfo
        ''')
    except sqlite3.OperationalError:
        #Fallback for older schemas without the imsi/carrier columns;
        #carrier_name is selected as filler and blanked out below.
        cursor.execute('''
        SELECT
            number,
            card_id,
            display_name,
            carrier_name,
            carrier_name,
            carrier_name,
            icc_id
        FROM
            siminfo
        ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Device Info')
        report.start_artifact_report(report_folder, f'SIM_info_{uid}')
        report.add_script()
        data_headers = ('Number', 'IMSI', 'Display Name', 'Carrier Name',
                        'ISO Code', 'Carrier ID', 'ICC ID')

        data_list = []
        for row in all_rows:
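            #If the fallback query ran, columns 3-5 all hold carrier_name;
            #blank the filler values so the report does not repeat them.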
            if row[3] == row[4]:
                row1 = ''
                row4 = ''
                row5 = ''
            else:
                row1 = row[1]
                row4 = row[4]
                row5 = row[5]
            data_list.append(
                (row[0], row1, row[2], row[3], row4, row5, row[6]))
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()

        tsvname = f'sim info {uid}'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc(f'No SIM_info_{uid} data available')
    db.close()
Exemplo n.º 19
0
def get_chromeLoginData(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found) == 'Login Data':  # skip -journal and other files
            continue
        browser_name = 'Chrome'
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. ; it duplicates the primary copy

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
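        # Depending on the Chrome build, date_created holds either WebKit-epoch
        # or Unix-epoch microseconds, so the query derives both readings and
        # get_valid_date() below is expected to pick the plausible one.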
        cursor.execute('''
        SELECT
        username_value,
        password_value,
        CASE date_created 
            WHEN "0" THEN "" 
            ELSE datetime(date_created / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
            END AS "date_created_win_epoch", 
        CASE date_created WHEN "0" THEN "" 
            ELSE datetime(date_created / 1000000 + (strftime('%s', '1970-01-01')), "unixepoch")
            END AS "date_created_unix_epoch",
        origin_url,
        blacklisted_by_user
        FROM logins
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} Login Data')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder,
                                       f'{browser_name} Login Data.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Created Time', 'Username', 'Password',
                            'Origin URL', 'Blacklisted by User')
            data_list = []
            for row in all_rows:
                password = ''
                password_enc = row[1]
                if password_enc:
                    password = decrypt(password_enc).decode("utf-8", 'replace')
                valid_date = get_valid_date(row[2], row[3])
                data_list.append(
                    (valid_date, row[0], password, row[4], row[5]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} login data'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} Login Data'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} Login Data available')

        db.close()
Exemplo n.º 20
0
def get_powerlogAll(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
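    # Table availability in the powerlog database varies by iOS version,
    # hence the version gates around each query below.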
    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            DATETIME(TIMESTAMPLOGGED, 'UNIXEPOCH') AS "TIMESTAMP LOGGED",
            APPLICATIONNAME AS "APPLICATION NAME / BUNDLE ID",
            ASSERTIONID AS "ASSERTION ID",
            ASSERTIONNAME AS "ASSERTION NAME",
            AUDIOROUTE AS "AUDIO ROUTE",
            MIRRORINGSTATE AS "MIRRORING STATE",
            OPERATION,
            PID,
            ID AS "PLAUDIOAGENT_EVENTPOINT_AUDIOAPP TABLE ID" 
            FROM
            PLAUDIOAGENT_EVENTPOINT_AUDIOAPP
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8], row[9]))

                report = ArtifactHtmlReport('Powerlog Audio Routing via App')
                report.start_artifact_report(report_folder, 'Audio Routing')
                report.add_script()
                data_headers = ('Timestamp', 'Timestamp Logged', 'Bundle ID',
                                'Assertion ID', 'Assertion Name', 'Audio Route',
                                'Mirroring State', 'Operation', 'PID',
                                'Audio App Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Audio Routing via App'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Powerlog Audio Routing via App')

    if version.parse(iOSversion) >= version.parse("10"):
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            BULLETINBUNDLEID AS "BULLETIN BUNDLE ID",
            TIMEINTERVAL / 60 AS "TIME INTERVAL IN MINUTES",
            COUNT AS "COUNT",
            POSTTYPE AS "POST TYPE",
            ID AS "PLSPRINGBOARDAGENT_AGGREGATE_SBBULLETINS_AGGREGATE TABLE ID" 
        FROM
            PLSPRINGBOARDAGENT_AGGREGATE_SBBULLETINS_AGGREGATE
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5]))

            report = ArtifactHtmlReport('Powerlog Aggregate Bulletins')
            report.start_artifact_report(report_folder, 'Aggregate Bulletins')
            report.add_script()
            data_headers = ('Timestamp', 'Bulletin Bundle ID',
                            'Time Interval in Minutes', 'Count', 'Post Type',
                            'Aggregate Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Agg Bulletins'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Aggregate Bulletins')

    if version.parse(iOSversion) >= version.parse("10"):
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            NOTIFICATIONBUNDLEID AS "BULLETIN BUNDLE ID",
            TIMEINTERVAL / 60 AS "TIME INTERVAL IN MINUTES",
            COUNT AS "COUNT",
            NOTIFICATIONTYPE AS "NOTIFICATION TYPE",
            ID AS "PLSPRINGBOARDAGENT_AGGREGATE_SBNOTIFICATIONS_AGGREGATE TABLE ID" 
        FROM
            PLSPRINGBOARDAGENT_AGGREGATE_SBNOTIFICATIONS_AGGREGATE 
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5]))

            report = ArtifactHtmlReport('Powerlog Aggregate Notifications')
            report.start_artifact_report(report_folder,
                                         'Aggregate Notifications')
            report.add_script()
            data_headers = ('Timestamp', 'Notification Bundle ID',
                            'Time Interval in Minutes', 'Count',
                            'Notification Type', 'Aggregate Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Agg Notifications'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Aggregate Notifications')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
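        # Recurring pattern in this database: each event row is LEFT JOINed to
        # PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET, the newest offset row per
        # event is kept via MAX(TIME_OFFSET_ID), and its SYSTEM value is added
        # to the raw timestamp to produce ADJUSTED_TIMESTAMP.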
        cursor.execute('''
        SELECT
                DATETIME(AIRDROP_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
                STATE,
                SUBEVENT,
                BUNDLEID AS BUNDLE_ID,
                PID,
                DATETIME(AIRDROP_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_AIRDROP_TIMESTAMP,
                DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
                SYSTEM AS TIME_OFFSET,
                AIRDROP_ID AS "PLXPCAGENT_EVENTFORWARD_AIRDROP TABLE ID"
            FROM
                (
                SELECT
                    BUNDLEID,
                    AIRDROP_ID,
                    AIRDROP_TIMESTAMP,
                    TIME_OFFSET_TIMESTAMP,
                    MAX(TIME_OFFSET_ID) AS MAX_ID,
                    SYSTEM,
                    PID,
                    SUBEVENT,
                    STATE
                FROM
                    (
                SELECT
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.TIMESTAMP AS AIRDROP_TIMESTAMP,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.BUNDLEID,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.PID,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.SUBEVENT,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.STATE,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.ID AS "AIRDROP_ID",
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM,
                    BUNDLEID 
                FROM
                    PLXPCAGENT_EVENTFORWARD_AIRDROP 
                LEFT JOIN
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
                    )
                AS AIRDROPSTATE 
                GROUP BY
                    AIRDROP_ID 
                )
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport(
                    'Powerlog Airdrop Connections Info')
                report.start_artifact_report(report_folder,
                                             'Airdrop Connections Info')
                report.add_script()
                data_headers = ('Adjusted Timestamp', 'State', 'Subevent',
                                'Bundle ID', 'PID',
                                'Original Airdrop Timestamp',
                                'Offset Timestamp', 'Time Offset',
                                'Airdrop Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Airdrop Connections Info'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Airdrop Connection Info')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            APPNAME AS "APP NAME",
            APPEXECUTABLE AS "APP EXECUTABLE NAME",
            APPBUNDLEID AS "BUNDLE ID",
            APPBUILDVERSION AS "APP BUILD VERSION",
            APPBUNDLEVERSION AS "APP BUNDLE VERSION",
            APPTYPE AS "APP TYPE",
            CASE APPDELETEDDATE 
                WHEN 0 THEN "NOT DELETED" 
                ELSE DATETIME(APPDELETEDDATE, 'UNIXEPOCH') 
            END "APP DELETED DATE",
            ID AS "PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS TABLE ID" 
        FROM
            PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport('Powerlog App Info')
                report.start_artifact_report(report_folder, 'App Info')
                report.add_script()
                data_headers = ('Timestamp', 'App Name', 'App Executable Name',
                                'Bundle ID', 'App Build Version',
                                'App Bundle Version', 'App Type',
                                'App Deleted Date', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog App Info'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Powerlog App Info')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            APPNAME AS "APP NAME",
            APPEXECUTABLE AS "APP EXECUTABLE NAME",
            APPBUNDLEID AS "BUNDLE ID",
            APPBUILDVERSION AS "APP BUILD VERSION",
            APPBUNDLEVERSION AS "APP BUNDLE VERSION",
            APPTYPE AS "APP TYPE",
            CASE APPDELETEDDATE 
                WHEN 0 THEN "NOT DELETED" 
                ELSE DATETIME(APPDELETEDDATE, 'UNIXEPOCH') 
            END "APP DELETED DATE",
            ID AS "PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS TABLE ID" 
        FROM
            PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport('Powerlog App Info')
                report.start_artifact_report(report_folder, 'App Info')
                report.add_script()
                data_headers = ('Timestamp', 'App Name', 'App Executable Name',
                                'Bundle ID', 'App Build Version',
                                'App Bundle Version', 'App TYpe',
                                'App Deleted Date', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog App Info'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Powerlog App Info')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            DATETIME(TIMESTAMPLOGGED, 'UNIXEPOCH') AS "TIMESTAMP LOGGED",
            APPLICATIONNAME AS "APPLICATION NAME / BUNDLE ID",
            ASSERTIONID AS "ASERTION ID",
            ASSERTIONNAME AS "ASSERTION NAME",
            AUDIOROUTE AS "AUDIO ROUTE",
            MIRRORINGSTATE AS "MIRRORING STATE",
            OPERATION,
            PID,
            ID AS "PLAUDIOAGENT_EVENTPOINT_AUDIOAPP TABLE ID" 
            FROM
            PLAUDIOAGENT_EVENTPOINT_AUDIOAPP
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport('Powerlog Audio Routing via App')
                report.start_artifact_report(report_folder, 'Audio Routing')
                report.add_script()
                data_headers = ('Timestamp', 'Timestamped Logged', 'Bundle ID',
                                'Assertion Name', 'Audio Route',
                                'Mirroring State', 'Operation', 'PID',
                                'Audio App Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Audio Routing via App'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Audio Routing Info')

    if version.parse(iOSversion) >= version.parse("11"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            DATETIME(START, 'UNIXEPOCH') AS "START",
            DATETIME(END, 'UNIXEPOCH') AS "END",
            STATE AS "STATE",
            FINISHED AS "FINISHED",
            HASERROR AS "HAS ERROR",
            ID AS "PLXPCAGENT_EVENTPOINT_MOBILEBACKUPEVENTS TABLE ID" 
        FROM
            PLXPCAGENT_EVENTPOINT_MOBILEBACKUPEVENTS
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("11"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6]))

                report = ArtifactHtmlReport('Powerlog Backup Info')
                report.start_artifact_report(report_folder, 'Backup Info')
                report.add_script()
                data_headers = ('Timestamp', 'Start', 'End', 'State',
                                'Finished', 'Has Error', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Backup Info'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Powerlog Backup Info')

    if version.parse(iOSversion) >= version.parse("11"):
        cursor = db.cursor()
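        # APPDELETEDDATE is 0 for installed apps, so filtering on > 0 keeps
        # only apps that were deleted at some point.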
        cursor.execute("""
        SELECT
                DATETIME(APPDELETEDDATE, 'UNIXEPOCH') AS "APP DELETED DATE",
                DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
                APPNAME AS "APP NAME",
                APPEXECUTABLE AS "APP EXECUTABLE NAME",
                APPBUNDLEID AS "BUNDLE ID",
                APPBUILDVERSION AS "APP BUILD VERSION",
                APPBUNDLEVERSION AS "APP BUNDLE VERSION",
                APPTYPE AS "APP TYPE",
                ID AS "PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS TABLE ID" 
            FROM
                PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS 
            WHERE
                APPDELETEDDATE > 0

        """)
    elif version.parse(iOSversion) == version.parse("10"):
        cursor = db.cursor()
        cursor.execute("""
        SELECT
                DATETIME(APPDELETEDDATE, 'UNIXEPOCH') AS "APP DELETED DATE",
                DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
                APPNAME AS "APP NAME",
                APPEXECUTABLE AS "APP EXECUTABLE NAME",
                APPBUNDLEID AS "BUNDLE ID",
                APPBUILDVERSION AS "APP BUILD VERSION",
                APPBUNDLEVERSION AS "APP BUNDLE VERSION",
                --APPTYPE AS "APP TYPE",
                ID AS "PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS TABLE ID" 
            FROM
                PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS 
            WHERE
                APPDELETEDDATE > 0
        """)
    elif version.parse(iOSversion) == version.parse("9"):
        cursor = db.cursor()
        cursor.execute("""
        SELECT
                DATETIME(APPDELETEDDATE, 'UNIXEPOCH') AS "APP DELETED DATE",
                DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
                APPNAME AS "APP NAME",
                APPBUNDLEID AS "BUNDLE ID",
                ID AS "PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS TABLE ID" 
            FROM
                PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS 
            WHERE
                APPDELETEDDATE > 0
        """)
    else:
        logfunc("Unsupported version for Powerlog Deleted Apps iOS version: " +
                iOSversion)
        cursor = None

    #Guard against reading stale results from an earlier query when this
    #iOS version is unsupported.
    all_rows = cursor.fetchall() if cursor is not None else []
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        if version.parse(iOSversion) >= version.parse("11"):
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8]))

            report = ArtifactHtmlReport('Powerlog Deleted Apps')
            report.start_artifact_report(report_folder, 'Deleted Apps')
            report.add_script()
            data_headers = ('App Deleted Date', 'Timestamp', 'App Name',
                            'App Executable Name', 'Bundle ID',
                            'App Build Version', 'App Bundle Version',
                            'App Type', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Deleted Apps'
            tsv(report_folder, data_headers, data_list, tsvname)

        elif version.parse(iOSversion) == version.parse("10"):
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8]))

            report = ArtifactHtmlReport('Powerlog Deleted Apps')
            report.start_artifact_report(report_folder, 'Deleted Apps')
            report.add_script()
            data_headers = ('App Deleted Date', 'Timestamp', 'App Name',
                            'App Executable Name', 'Bundle ID',
                            'App Build Version', 'App Bundle Version',
                            'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Deleted Apps'
            tsv(report_folder, data_headers, data_list, tsvname)

        elif version.parse(iOSversion) == version.parse("9"):
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

            report = ArtifactHtmlReport('Powerlog Deleted Apps')
            report.start_artifact_report(report_folder, 'Deleted Apps')
            report.add_script()
            data_headers = ('App Deleted Date', 'Timestamp', 'App Name',
                            'Bundle ID', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Deleted Apps'
            tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in Powerlog Deleted Apps')

    if version.parse(iOSversion) >= version.parse("10"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        DATETIME(LIGHTNINGCONNECTOR_TIMESTAMP + SYSTEM, 'UNIXEPOCH','LOCALTIME') AS ADJUSTED_TIMESTAMP,
        CASE IOACCESSORYPOWERMODE 
            WHEN "1" THEN "UNPLUGGED" 
            WHEN "3" THEN "PLUGGED IN" 
        END  AS "IO ACCESSORY POWER MODE",
        DATETIME(LIGHTNINGCONNECTOR_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_LIGHTNINGCONNECTOR_TIMESTAMP,
        DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
        SYSTEM AS TIME_OFFSET,
        LIGHTNINGCONNECTOR_ID AS "PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS TABLE ID" 
    	FROM
        (
        SELECT
            LIGHTNINGCONNECTOR_ID,
            LIGHTNINGCONNECTOR_TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            IOACCESSORYPOWERMODE,
            SYSTEM
        FROM
            (
            SELECT
                PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS.TIMESTAMP AS LIGHTNINGCONNECTOR_TIMESTAMP,
                IOACCESSORYPOWERMODE,
                PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS.ID AS "LIGHTNINGCONNECTOR_ID" ,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
            FROM
                PLBATTERYAGENT_EVENTFORWARD_LIGHTNINGCONNECTORSTATUS
            LEFT JOIN
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
            )
            AS LIGHTNINGCONNECTOR_STATE 
        GROUP BY
            LIGHTNINGCONNECTOR_ID 
        )
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append(
                        (row[0], row[1], row[2], row[3], row[4], row[5]))

                report = ArtifactHtmlReport(
                    'Powerlog Lightning Connector Status')
                report.start_artifact_report(report_folder,
                                             'Lightning Connector Status')
                report.add_script()
                data_headers = ('Adjusted Timestamp', 'Accessory Power Mode',
                                'Original Lightning Connector Timestamp',
                                'Offset Timestamp', 'Time Offset', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Lightning Connector Status'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Powerlog Lightning Connector Status')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(LOCATIONAGENT_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
            DATETIME(TIMESTAMPLOGGED+ SYSTEM, 'UNIXEPOCH') AS "TIMESTAMP LOGGED (ADJ)",
            DATETIME(TIMESTAMPEND + SYSTEM, 'UNIXEPOCH') AS "TIMESTAMP END (ADJ)",
            BUNDLEID AS "BUNDLE ID",
            TYPE AS "TYPE",
            LOCATIONDESIREDACCURACY AS "LOCATION DESIRED ACCURACY",
            LOCATIONDISTANCEFILTER AS "LOCATION DISTANCE FILTER",
            CLIENT AS "CLIENT",
            EXECUTABLE AS "EXECUTABLE",
            DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
            SYSTEM AS TIME_OFFSET,
            LOCATIONAGENT_ID AS "PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS TABLE ID" 
        	FROM
            (
            SELECT
                LOCATIONAGENT_ID,
                LOCATIONAGENT_TIMESTAMP,
                TIME_OFFSET_TIMESTAMP,
                MAX(TIME_OFFSET_ID) AS MAX_ID,
                TIMESTAMPEND,
                TIMESTAMPLOGGED,
                BUNDLEID,
                TYPE,
                LOCATIONDESIREDACCURACY,
                LOCATIONDISTANCEFILTER,
                CLIENT,
                EXECUTABLE,
                SYSTEM
            FROM
                (
                SELECT
                    PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS.TIMESTAMP AS LOCATIONAGENT_TIMESTAMP,
                    TIMESTAMPEND,
                    TIMESTAMPLOGGED,
                    BUNDLEID,
                    TYPE,
                    LOCATIONDESIREDACCURACY,
                    LOCATIONDISTANCEFILTER,
                    CLIENT,
                    EXECUTABLE,
                    PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS.ID AS "LOCATIONAGENT_ID" ,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
                FROM
                    PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS
                LEFT JOIN
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET
                )
                AS LOCATIONAGENT_STATE 
            GROUP BY
                LOCATIONAGENT_ID 
            )        
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append(
                        (row[0], row[1], row[2], row[3], row[4], row[5],
                         row[6], row[7], row[8], row[9], row[10], row[11]))

                report = ArtifactHtmlReport('Powerlog Location Use by App')
                report.start_artifact_report(report_folder,
                                             'Location Use by App')
                report.add_script()
                data_headers = ('Adjusted Timestamp', 'Timestamp Logged',
                                'Timestamp End', 'Bundle ID', 'Type',
                                'Location Desired Accuracy',
                                'Location Distance Filter', 'Client',
                                'Executable', 'Offset Timestamp',
                                'Time Offset', 'Client Status Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Location Use by App'
                tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No data available in Location Use by App')

    if version.parse(iOSversion) >= version.parse("10"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            BUILD,
            DEVICE,
            HWMODEL,
            PAIRINGID AS "PAIRING ID",
            ID AS "PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG TABLE ID" 
        FROM
            PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG
        ''')
    else:
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            BUILD,
            DEVICE,
            ID AS "PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG TABLE ID" 
        FROM
            PLCONFIGAGENT_EVENTNONE_PAIREDDEVICECONFIG
        ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []

        if version.parse(iOSversion) >= version.parse("10"):
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5]))

            report = ArtifactHtmlReport('Powerlog Paired Device Configuration')
            report.start_artifact_report(report_folder,
                                         'Paired Device Configuration')
            report.add_script()
            data_headers = ('Timestamp', 'Build', 'Device', 'HW Model',
                            'Pairing ID', 'PairedDeviceConfig Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Paired Device Conf'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            report = ArtifactHtmlReport('Powerlog Paired Device Configuration')
            report.start_artifact_report(report_folder,
                                         'Paired Device Configuration')
            report.add_script()
            data_headers = ('Timestamp', 'Build', 'Device',
                            'PairedDeviceConfig Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Paired Device Conf'
            tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in Powerlog Paired Device Configuration')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
                DATETIME(TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
                DATETIME(TIMESTAMPEND + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_END_TIMESTAMP,
                BUNDLENAME AS 'BUNDLE ID',
                PROCESSNAME AS 'PROCESS NAME',
                CELLIN AS 'CELL IN',
                CELLOUT AS 'CELL OUT',
                WIFIIN AS 'WIFI IN',
                WIFIOUT AS 'WIFI OUT',
                DATETIME(TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_TIMESTAMP,
                DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
                SYSTEM AS TIME_OFFSET,
                TABLE_ID AS "PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF TABLE ID"
            FROM
            (
            SELECT
                TABLE_ID,
                TIMESTAMP,
                TIME_OFFSET_TIMESTAMP,
                MAX(TIME_OFFSET_ID) AS MAX_ID,
                TIMESTAMPEND,
                BUNDLENAME,
                PROCESSNAME,
                CELLIN,
                CELLOUT,
                WIFIIN,
                WIFIOUT,
                SYSTEM
            FROM
            (
            SELECT
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.TIMESTAMP,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.TIMESTAMPEND,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.BUNDLENAME,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.PROCESSNAME,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.CELLIN,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.CELLOUT,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.WIFIIN,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.WIFIOUT,
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.ID AS "TABLE_ID",
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
            FROM
                PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF 
            LEFT JOIN
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
            )
            GROUP BY
            TABLE_ID 
                 )
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11]))

            report = ArtifactHtmlReport('Powerlog Process Data Usage')
            report.start_artifact_report(report_folder, 'Process Data Usage')
            report.add_script()
            data_headers = ('Adjusted Timestamp', 'Adjusted End Timestamp',
                            'Bundle ID', 'Process Name', 'Cell In', 'Cell Out',
                            'WiFi In', 'WiFi Out', 'Original Timestamp',
                            'Offset Timestamp', 'Time Offset',
                            'Usage Diff Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Process Data Usage'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog Process Data Usage')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
            BUNDLEID AS 'BUNDLE ID',
            CONNECTIONTYPE AS 'CONNECTION TYPE',
            ISDROPPED AS 'IS DROPPED',
            LINKQUALITY AS 'LINK QUALITY',
            PRIORITY AS 'PRIORITY',
            TOPIC AS 'TOPIC',
            SERVERHOSTNAME AS 'SERVERHOSTNAME',
            SERVERIP AS 'SERVER IP',
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_TIMESTAMP,
            DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
            SYSTEM AS TIME_OFFSET,
            TABLE_ID AS "PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH TABLE ID"
        FROM
        (
        SELECT
            TABLE_ID,
            TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            BUNDLEID,
            CONNECTIONTYPE,
            ISDROPPED,
            LINKQUALITY,
            PRIORITY,
            TOPIC,
            SERVERHOSTNAME,
            SERVERIP,
            SYSTEM
        FROM
        (
        SELECT
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.TIMESTAMP,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.BUNDLEID,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.CONNECTIONTYPE,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.ISDROPPED,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.LINKQUALITY,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.PRIORITY,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.TOPIC,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.SERVERHOSTNAME,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.SERVERIP,
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH.ID AS "TABLE_ID",
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
        FROM
            PLPUSHAGENT_EVENTPOINT_RECEIVEDPUSH 
        LEFT JOIN
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
        )
        GROUP BY
        TABLE_ID 
            )
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12]))

            report = ArtifactHtmlReport('Powerlog Push Message Received')
            report.start_artifact_report(report_folder,
                                         'Push Message Received')
            report.add_script()
            data_headers = ('Adjusted Timestamp', 'Bundle ID',
                            'Connection Type', 'Is Dropped', 'Link Quality',
                            'Priority', 'Topic', 'Server Hostname',
                            'Server IP', 'Original Timestamp',
                            'Offset Timestamp', 'Time Offset',
                            'Aggregate Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Push Message Received'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog Push Message Received')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
         SELECT
        DATETIME(TIMEZONE_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
        TIMEZONENAME AS "TIME ZONE NAME",
        COUNTRYCODE AS "COUNTRY CODE",
        LOCALEID AS "LOCALE ID",
        SECONDSFROMGMT / 3600 AS "HOURS FROM GMT",
        TIMEZONEISINDST AS "TIME ZONE IN DST",
        TRIGGER AS "TRIGGER",
        DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
        SYSTEM AS TIME_OFFSET,
        TIMEZONE_ID AS "PLLOCALEAGENT_EVENTFORWARD_TIMEZONE TABLE ID" 
    	FROM
        (
        SELECT
            TIMEZONE_ID,
            TIMEZONE_TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            TIMEZONENAME,
            COUNTRYCODE,
            LOCALEID,
            SECONDSFROMGMT,
            TIMEZONEISINDST,
            TRIGGER,
            SYSTEM
        FROM
            (
            SELECT
                PLLOCALEAGENT_EVENTFORWARD_TIMEZONE.TIMESTAMP AS TIMEZONE_TIMESTAMP,
                TIMEZONENAME,
                COUNTRYCODE,
                LOCALEID,
                SECONDSFROMGMT,
                TIMEZONEISINDST,
                TRIGGER,
                PLLOCALEAGENT_EVENTFORWARD_TIMEZONE.ID AS "TIMEZONE_ID" ,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
            FROM
                PLLOCALEAGENT_EVENTFORWARD_TIMEZONE
            LEFT JOIN
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET
            )
            AS TIMEZONE_STATE 
        GROUP BY
            TIMEZONE_ID 
        )
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9]))

            report = ArtifactHtmlReport('Powerlog Timezones')
            report.start_artifact_report(report_folder, 'Timezones')
            report.add_script()
            data_headers = ('Adjusted Timestamp', 'Timezone Name',
                            'Country Code', 'Locale ID', 'Hours from GMT',
                            'Timezone in DST', 'Trigger', 'Offset Timestamp',
                            'Time Offset', 'Timezone Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Timezones'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog Timezones')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TORCH_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
            BUNDLEID AS BUNDLE_ID,
            CASE LEVEL
            WHEN "0" THEN "OFF"
            WHEN "1" THEN "ON"
            END AS STATUS,
            DATETIME(TORCH_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_TORCH_TIMESTAMP,
            DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
            SYSTEM AS TIME_OFFSET,
        TORCH_ID
        FROM
            (
            SELECT
                BUNDLEID,
                TORCH_ID,
                TORCH_TIMESTAMP,
                TIME_OFFSET_TIMESTAMP,
                MAX(TIME_OFFSET_ID) AS MAX_ID,
                SYSTEM,
                LEVEL
            FROM
                    (
                    SELECT
                        PLCAMERAAGENT_EVENTFORWARD_TORCH.TIMESTAMP AS TORCH_TIMESTAMP,
                        PLCAMERAAGENT_EVENTFORWARD_TORCH.BUNDLEID,
                        PLCAMERAAGENT_EVENTFORWARD_TORCH.LEVEL,
                        PLCAMERAAGENT_EVENTFORWARD_TORCH.ID AS "TORCH_ID",
                        PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                        PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                        PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM,
                        BUNDLEID 
                    FROM
                        PLCAMERAAGENT_EVENTFORWARD_TORCH 
                    LEFT JOIN
                        PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
                    ) 
                    AS TORCHESTATE 
            GROUP BY
                TORCH_ID 
            )
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

            report = ArtifactHtmlReport('Powerlog Torch')
            report.start_artifact_report(report_folder, 'Torch')
            report.add_script()
            data_headers = ('Adjusted Timestamp', 'Bundle ID', 'Status',
                            'Original Torch Timestamp', 'Offset Timestamp',
                            'Time Offset', 'Torch ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Torch'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog Torch')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        DATETIME(VIDEO_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
        CLIENTDISPLAYID AS "CLIENT DISPLAY ID",
        STATE,
        CLIENTPID AS "CLIENT PID",
        DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
        SYSTEM AS TIME_OFFSET,
        VIDEO_ID AS "PLVIDEOAGENT_EVENTFORWARD_VIDEO TABLE ID" 
    	FROM
        (
        SELECT
            VIDEO_ID,
            VIDEO_TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            CLIENTDISPLAYID,
            STATE,
            CLIENTPID,
            SYSTEM
        FROM
            (
            SELECT
                PLVIDEOAGENT_EVENTFORWARD_VIDEO.TIMESTAMP AS VIDEO_TIMESTAMP,
                CLIENTDISPLAYID,
                STATE,
                CLIENTPID,
                PLVIDEOAGENT_EVENTFORWARD_VIDEO.ID AS "VIDEO_ID" ,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
            FROM
                PLVIDEOAGENT_EVENTFORWARD_VIDEO
            LEFT JOIN
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET
            )
            AS VIDEO_STATE 
        GROUP BY
            VIDEO_ID 
        )
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

            report = ArtifactHtmlReport('Powerlog App Playing Video')
            report.start_artifact_report(report_folder, 'App Playing Video')
            report.add_script()
            data_headers = ('Adjusted Timestamp', 'Client Display ID', 'State',
                            'Client PID', 'Offset Timestamp', 'Time Offset',
                            'Event Forward Video Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog App Playing Video'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog App Playing Video')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
        VOLUME,
        CASE MUTED 
            WHEN "0" THEN "NO" 
            WHEN "1" THEN "YES" 
        END "MUTED", 
        ID AS "PLAUDIOAGENT_EVENTFORWARD_OUTPUT TABLE ID" 
        FROM
        PLAUDIOAGENT_EVENTFORWARD_OUTPUT
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            report = ArtifactHtmlReport('Powerlog Volume')
            report.start_artifact_report(report_folder, 'Volume')
            report.add_script()
            data_headers = ('Timestamp', 'Volume', 'Muted',
                            'Event Forward Output Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog Volume'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog Volume')

    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        DATETIME(WIFIPROPERTIES_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
        CURRENTSSID,
        CURRENTCHANNEL,
        DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
        SYSTEM AS TIME_OFFSET,
        WIFIPROPERTIES_ID AS "PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES TABLE ID" 
    	FROM
        (
        SELECT
            WIFIPROPERTIES_ID,
            WIFIPROPERTIES_TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            CURRENTSSID,
            CURRENTCHANNEL,
            SYSTEM
        FROM
            (
            SELECT
                PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES.TIMESTAMP AS WIFIPROPERTIES_TIMESTAMP,
                CURRENTSSID,
                CURRENTCHANNEL,
                PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES.ID AS "WIFIPROPERTIES_ID" ,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
            FROM
                PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES
            LEFT JOIN
                PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
            )
            AS WIFIPROPERTIES_STATE 
        GROUP BY
            WIFIPROPERTIES_ID 
        )    
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5]))

            report = ArtifactHtmlReport('Powerlog WiFi Network Connections')
            report.start_artifact_report(report_folder,
                                         'WiFi Network Connections')
            report.add_script()
            data_headers = ('Adjusted Timestamp', 'Current SSID',
                            'Current Channel', 'Offset Timestamp',
                            'Time Offset', 'Cumulative Prop. Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Powerlog WiFi Network Connections'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No data available in Powerlog WiFi Network Connections')
Exemplo n.º 21
0
def get_teams(files_found, report_folder, seeker):
    CacheFile = 0
    databasedata = ''  # avoid a NameError if no .sqlite file is found
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.sqlite'):
            databasedata = file_found
        if file_found.endswith('CacheFile'):
            CacheFile = file_found

    nsplist = {}
    if CacheFile != 0:
        with open(CacheFile, 'rb') as nsfile:
            nsplist = nd.deserialize_plist(nsfile)
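    # The CacheFile plist appears to map remote image URLs to their cached
    # file paths on disk; it is used below to resolve inline message images.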

    db = open_sqlite_db_readonly(databasedata)
    cursor = db.cursor()
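    # ZARRIVALTIME is a Core Data timestamp (seconds since 2001-01-01), so
    # datetime('2001-01-01', ZARRIVALTIME || ' seconds') rebases it in SQL.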
    cursor.execute('''
    SELECT
    datetime('2001-01-01', "ZARRIVALTIME" || ' seconds'),
    ZIMDISPLAYNAME,
    ZCONTENT
    from ZSMESSAGE
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            thumb = ''
            if '<div><img src=' in row[2]:
                matches = re.search('"([^"]+)"', row[2])
                imageURL = (matches[0].strip('\"'))
                if imageURL in nsplist:
                    data_file_real_path = nsplist[imageURL]
                    for match in files_found:
                        if data_file_real_path in match:
                            shutil.copy2(match, report_folder)
                            data_file_name = os.path.basename(match)
                            thumb = f'<img src="{report_folder}/{data_file_name}"></img>'
            data_list.append((row[0], row[1], row[2], thumb))

        description = 'Teams Messages'
        report = ArtifactHtmlReport('Teams Messages')
        report.start_artifact_report(report_folder, 'Teams Messages',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Name', 'Message', 'Shared Media')
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Shared Media'])
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Microsoft Teams Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Microsoft Teams Messages data available')

    cursor.execute('''
    SELECT
    ZDISPLAYNAME,
    zemail,
    ZPHONENUMBER
    from
    ZDEVICECONTACTHASH
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = 'Teams Contact'
        report = ArtifactHtmlReport('Teams Contact')
        report.start_artifact_report(report_folder, 'Teams Contact',
                                     description)
        report.add_script()
        data_headers = ('Display Name', 'Email', 'Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Teams Contact'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Teams Contact data available')

    cursor.execute('''
    SELECT
    datetime('2001-01-01', "ZTS_LASTSYNCEDAT" || ' seconds'),
    ZDISPLAYNAME,
    ZTELEPHONENUMBER
    from zuser
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = 'Teams User'
        report = ArtifactHtmlReport('Teams User')
        report.start_artifact_report(report_folder, 'Teams User', description)
        report.add_script()
        data_headers = ('Timestamp Last Sync', 'Display Name', 'Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams User'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Microsoft Teams User'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Teams User data available')

    cursor.execute('''
    SELECT
    ZCOMPOSETIME,
    zfrom,
    ZIMDISPLAYNAME,
    zcontent,
    ZPROPERTIES
    from ZSMESSAGE, ZMESSAGEPROPERTIES
    where ZSMESSAGE.ZTSID = ZMESSAGEPROPERTIES.ZTSID
    order by ZCOMPOSETIME
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list_calls = []
    data_list_cards = []
    data_list_unparsed = []

    if usageentries > 0:
        for row in all_rows:
            plist = ''
            composetime = row[0].replace('T', ' ')
            plist_file_object = io.BytesIO(row[4])
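            # ZPROPERTIES holds either a plain (binary) property list or an
            # NSKeyedArchiver archive; sniff for the b'NSKeyedArchiver'
            # marker to pick the matching decoder.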
            if row[4].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError,
                        nd.biplist.NotBinaryPlistException,
                        nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException,
                        nd.ccl_bplist.BplistError, ValueError, TypeError,
                        OSError, OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[4]}, error was:' +
                            str(ex))
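            # A deserialized property list may carry a JSON 'call-log'
            # payload (call metadata) or a 'cards' payload (shared links and
            # locations); anything else is kept as unparsed.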
            if 'call-log' in plist:
                datacalls = json.loads(plist['call-log'])
                callstart = (datacalls.get('startTime'))
                callstart = callstart.replace('T', ' ')
                callconnect = (datacalls.get('connectTime'))
                callconnect = callconnect.replace('T', ' ')
                callend = (datacalls['endTime'])
                callend = callend.replace('T', ' ')
                calldirection = (datacalls['callDirection'])
                calltype = (datacalls['callType'])
                callstate = (datacalls['callState'])
                calloriginator = (datacalls['originator'])
                calltarget = (datacalls['target'])
                calloriginatordname = (
                    datacalls['originatorParticipant']['displayName'])
                callparticipantdname = (
                    datacalls['targetParticipant']['displayName'])
                data_list_calls.append(
                    (composetime, row[1], row[2], row[3], callstart,
                     callconnect, callend, calldirection, calltype, callstate,
                     calloriginator, calltarget, calloriginatordname,
                     callparticipantdname))
            elif 'cards' in plist:
                cards = json.loads(plist['cards'])
                cardurl = (
                    cards[0]['content']['body'][0]['selectAction']['url'])
                cardtitle = (
                    cards[0]['content']['body'][0]['selectAction']['title'])
                cardtext = (cards[0]['content']['body'][1]['text'])
                cardurl2 = (cards[0]['content']['body'][0]['url'])
                # Default the location fields so the append below cannot hit
                # a NameError when the card has no 'id' payload.
                cardlat = cardlong = cardexpires = carddevid = ''
                if (cards[0]['content']['body'][0].get('id')) is not None:
                    idcontent = json.loads(
                        cards[0]['content']['body'][0]['id'])
                    cardlat = (idcontent.get('latitude'))
                    cardlong = (idcontent.get('longitude'))
                    cardexpires = (idcontent.get('expiresAt'))
                    cardexpires = datetime.datetime.fromtimestamp(
                        cardexpires / 1000)
                    carddevid = (idcontent.get('deviceId'))
                data_list_cards.append(
                    (composetime, row[1], row[2], row[3], cardurl, cardtitle,
                     cardtext, cardurl2, cardlat, cardlong, cardexpires,
                     carddevid))
            else:
                data_list_unparsed.append((composetime, row[1], row[2],
                                           row[3], plist))

        description = 'Microsoft Teams Call Logs'
        report = ArtifactHtmlReport('Microsoft Teams Call Logs')
        report.start_artifact_report(report_folder, 'Teams Call Logs',
                                     description)
        report.add_script()
        data_headers = ('Compose Timestamp', 'From', 'Display Name', 'Content',
                        ' Call Start', 'Call Connect', 'Call End',
                        'Call Direction', 'Call Type', 'Call State',
                        'Call Originator', 'Call Target',
                        'Call Originator Name', 'Call Participant Name')
        report.write_artifact_data_table(data_headers, data_list_calls,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Call Logs'
        tsv(report_folder, data_headers, data_list_calls, tsvname)

        tlactivity = 'Microsoft Teams Call Logs'
        timeline(report_folder, tlactivity, data_list_calls, data_headers)

        description = 'Microsoft Teams Shared Locations'
        report = ArtifactHtmlReport('Microsoft Teams Shared Locations')
        report.start_artifact_report(report_folder, 'Teams Shared Locations',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'From', 'Display Name', 'Content',
                        'Card URL', 'Card Title', 'Card Text', 'Card URL2',
                        'Latitude', 'Longitude', 'Card Expires', 'Device ID')
        report.write_artifact_data_table(data_headers, data_list_cards,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Shared Locations'
        tsv(report_folder, data_headers, data_list_cards, tsvname)

        tlactivity = 'Microsoft Teams Shared Locations'
        timeline(report_folder, tlactivity, data_list_cards, data_headers)

        kmlactivity = 'Microsoft Teams Shared Locations'
        kmlgen(report_folder, kmlactivity, data_list_cards, data_headers)

    else:
        logfunc('No Microsoft Teams Call Logs & Cards data available')

    db.close()
Exemplo n.º 22
0
def get_powerlogWifiprop(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    DATETIME(WIFIPROPERTIES_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
    CURRENTSSID,
    CURRENTCHANNEL,
    DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
    SYSTEM AS TIME_OFFSET,
    WIFIPROPERTIES_ID AS "PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES TABLE ID" 
	FROM
    (
    SELECT
        WIFIPROPERTIES_ID,
        WIFIPROPERTIES_TIMESTAMP,
        TIME_OFFSET_TIMESTAMP,
        MAX(TIME_OFFSET_ID) AS MAX_ID,
        CURRENTSSID,
        CURRENTCHANNEL,
        SYSTEM
    FROM
        (
        SELECT
            PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES.TIMESTAMP AS WIFIPROPERTIES_TIMESTAMP,
            CURRENTSSID,
            CURRENTCHANNEL,
            PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES.ID AS "WIFIPROPERTIES_ID" ,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
        FROM
            PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES
        LEFT JOIN
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
        )
        AS WIFIPROPERTIES_STATE 
    GROUP BY
        WIFIPROPERTIES_ID 
    )    
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report = ArtifactHtmlReport('Powerlog WiFi Network Connections')
        report.start_artifact_report(report_folder, 'WiFi Network Connections')
        report.add_script()
        data_headers = ('Adjusted Timestamp', 'Current SSID',
                        'Current Channel', 'Offset Timestamp', 'Time Offset',
                        'Cumulative Prop. Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Powerlog Wifi Prop'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

    db.close()
    return
Exemplo n.º 23
0
def get_firefox(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found
        ) == 'places.sqlite':  # skip -journal and other files
            continue

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
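        # The datetime() conversions below assume millisecond-resolution
        # timestamps (hence the /1000), matching the Firefox for Android
        # places schema this module targets.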
        cursor.execute('''
        SELECT
        datetime(moz_places.last_visit_date_local/1000, 'unixepoch') AS LastVisitDate,
        moz_places.url AS URL,
        moz_places.title AS Title,
        moz_places.visit_count_local AS VisitCount,
        moz_places.description AS Description,
        CASE
            WHEN moz_places.hidden = 0 THEN 'No'
            WHEN moz_places.hidden = 1 THEN 'Yes'
        END AS Hidden,
        CASE
            WHEN moz_places.typed = 0 THEN 'No'
            WHEN moz_places.typed = 1 THEN 'Yes'
        END AS Typed,
        moz_places.frecency AS Frecency,
        moz_places.preview_image_url AS PreviewImageURL
        FROM
        moz_places
        INNER JOIN moz_historyvisits ON moz_places.origin_id = moz_historyvisits.id
        INNER JOIN moz_places_metadata ON moz_places.id = moz_places_metadata.id
        ORDER BY
        moz_places.last_visit_date_local ASC  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Firefox - Web History')
            report.start_artifact_report(report_folder,
                                         'Firefox - Web History')
            report.add_script()
            data_headers = ('Last Visit Date', 'URL', 'Title', 'Visit Count',
                            'Description', 'Hidden', 'Typed', 'Frecency',
                            'Preview Image URL')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Firefox - Web History'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Firefox - Web History'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Web History data available')

        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(moz_historyvisits.visit_date/1000, 'unixepoch') AS VisitDate,
        moz_places.url AS URL,
        moz_places.title AS Title,
        moz_historyvisits.id AS VisitID,
        moz_historyvisits.from_visit AS FromVisitID,
        CASE moz_historyvisits.visit_type
            WHEN 1 THEN 'TRANSITION_LINK'
            WHEN 2 THEN 'TRANSITION_TYPED'
            WHEN 3 THEN 'TRANSITION_BOOKMARK'
            WHEN 4 THEN 'TRANSITION_EMBED'
            WHEN 5 THEN 'TRANSITION_REDIRECT_PERMANENT'
            WHEN 6 THEN 'TRANSITION_REDIRECT_TEMPORARY'
            WHEN 7 THEN 'TRANSITION_DOWNLOAD'
            WHEN 8 THEN 'TRANSITION_FRAMED_LINK'
            WHEN 9 THEN 'TRANSITION_RELOAD'
        END AS VisitType,
        CASE
            WHEN moz_places.typed = 0 THEN 'No'
            WHEN moz_places.typed = 1 THEN 'Yes'
        END AS Typed
        FROM
        moz_historyvisits
        INNER JOIN moz_places ON moz_places.id = moz_historyvisits.place_id
        ORDER BY
        moz_historyvisits.visit_date ASC  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Firefox - Web Visits')
            report.start_artifact_report(report_folder, 'Firefox - Web Visits')
            report.add_script()
            data_headers = ('Visit Date', 'URL', 'Title', 'Visit ID',
                            'From Visit ID', 'Visit Type', 'Typed')
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Firefox - Web Visits'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Firefox - Web Visits'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Web Visits data available')

        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(moz_bookmarks.dateAdded/1000,'unixepoch'),
        datetime(moz_bookmarks.lastModified/1000,'unixepoch'),
        moz_bookmarks.title,
        moz_places.url,
        CASE moz_bookmarks.type
            WHEN 1 THEN 'URL'
            WHEN 2 THEN 'Folder'
            WHEN 3 THEN 'Separator'
        END,
        moz_bookmarks.id,
        moz_bookmarks.parent,
        moz_bookmarks.position,
        moz_bookmarks.syncStatus
        FROM moz_bookmarks
        LEFT JOIN moz_places ON moz_bookmarks.fk = moz_places.id
        ORDER BY moz_bookmarks.id ASC
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Firefox - Bookmarks')
            report.start_artifact_report(report_folder, 'Firefox - Bookmarks')
            report.add_script()
            data_headers = ('Added Timestamp', 'Modified Timestamp', 'Title',
                            'URL', 'Bookmark Type', 'ID', 'Parent', 'Position',
                            'Sync Status')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Firefox - Bookmarks'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Firefox - Bookmarks'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Bookmarks data available')

        cursor = db.cursor()
        cursor.execute('''
        SELECT
        id AS 'ID',
        term AS 'Search Term'
        FROM moz_places_metadata_search_queries
        ORDER BY id ASC
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Firefox - Search Terms')
            report.start_artifact_report(report_folder,
                                         'Firefox - Search Terms')
            report.add_script()
            data_headers = ('ID', 'Search Term')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Firefox - Search Terms'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No Firefox - Search Terms data available')

        db.close()
Exemplo n.º 24
0
def get_mailprotect(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf

    if version.parse(iOSversion) <= version.parse("11"):
        logfunc("Unsupported version for iOS emails in iOS " + iOSversion)
        return ()

    if version.parse(iOSversion) < version.parse("13"):
        head, end = os.path.split(files_found[0])
        tempf = report_folder
        db = sqlite3.connect(report_folder + "/emails.db")
        cursor = db.cursor()
        cursor.execute("""
		create table email1(rowid int, ds text, dr text, size int, sender text, messid int, subject text, receipt text, cc text, bcc text)
		""")
        db.commit()

        cursor.execute("""
		create table email2(rowid int, data text)
		""")
        db.commit()

        db = sqlite3.connect(head + "/Envelope Index")
        db.execute(f'ATTACH DATABASE "{head}/Protected Index" AS PI')
        db.execute(f'ATTACH DATABASE "{tempf}/emails.db" AS emails')
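        # Three databases are in play: main (Envelope Index) holds message
        # metadata, PI (Protected Index) holds sender/subject/body data, and
        # emails.db is the scratch database created above to merge them.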

        cursor = db.cursor()
        cursor.execute("""
		select  
		main.messages.ROWID,
		main.messages.date_sent,
		main.messages.date_received,
		main.messages.size,
		PI.messages.sender,
		PI.messages.message_id,
		PI.messages.subject,
		PI.messages._to,
		PI.messages.cc,
		PI.messages.bcc
		from main.messages, PI.messages
		where main.messages.ROWID =  PI.messages.message_id 
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            print(f"Total emails {str(usageentries)}")
            usageentries1 = str(usageentries)
            for row in all_rows:
                # print(row)
                datainsert = (
                    row[0],
                    row[1],
                    row[2],
                    row[3],
                    row[4],
                    row[5],
                    row[6],
                    row[7],
                    row[8],
                    row[9],
                )
                cursor.execute(
                    "INSERT INTO emails.email1 (rowid, ds, dr, size, sender, messid, subject, receipt, cc, bcc)  VALUES(?,?,?,?,?,?,?,?,?,?)",
                    datainsert,
                )
                db.commit()
        else:
            print("Zero rows")

        cursor = db.cursor()
        cursor.execute("""
		select  
		main.messages.ROWID,
		PI.message_data.data
		from main.message_data, main.messages, PI.messages, PI.message_data
		where main.messages.ROWID = main.message_data.message_id and PI.messages.message_id = main.message_data.message_id 
		and PI.message_data.message_data_id = main.message_data.ROWID
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            print(f"Total emails with message data {str(usageentries)}")
            usageentries2 = str(usageentries)
            for row in all_rows:
                datainsert = (
                    row[0],
                    row[1],
                )
                cursor.execute(
                    "INSERT INTO emails.email2 (rowid, data)  VALUES(?,?)",
                    datainsert,
                )
                db.commit()
        else:
            print("Zero rows")

        cursor.execute("""
		select 
		email1.rowid,
		datetime(email1.ds, 'unixepoch') as ds,
		datetime(email1.dr, 'unixepoch') as dr,
		email1.sender, 
		email1.messid, 
		email1.subject, 
		email1.receipt, 
		email1.cc,
		email1.bcc,
		email2.data 
		from email1
		left outer join email2
		on email2.rowid = email1.rowid
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[9], row[7], row[8]))

            file_found = head
            description = ''
            report = ArtifactHtmlReport('iOS Mail')
            report.start_artifact_report(report_folder, 'Emails', description)
            report.add_script()
            data_headers = ('Row ID', 'Date Sent', 'Date Received', 'Sender',
                            'Message ID', 'Subject', 'Recipient', 'Message',
                            'CC', 'BCC')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'iOS Mail'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc("No iOS emails available")

    if version.parse(iOSversion) >= version.parse("13"):
        head, end = os.path.split(files_found[0])
        tempf = report_folder
        db = sqlite3.connect(head + "/Envelope Index")
        db.execute(f'ATTACH DATABASE "{head}/Protected Index" AS PI')

        cursor = db.cursor()
        cursor.execute("""
		SELECT
		datetime(main.messages.date_sent, 'UNIXEPOCH') as datesent,
		datetime(main.messages.date_received, 'UNIXEPOCH') as datereceived,
		PI.addresses.address,
		PI.addresses.comment,
		PI.subjects.subject,
		PI.summaries.summary,
		main.messages.read,
		main.messages.flagged,
		main.messages.deleted,
		main.mailboxes.url
		from main.mailboxes, main.messages, PI.subjects, PI.addresses, PI.summaries
		where main.messages.subject = PI.subjects.ROWID 
		and main.messages.sender = PI.addresses.ROWID 
		and main.messages.summary = PI.summaries.ROWID
		and main.mailboxes.ROWID = main.messages.mailbox
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9]))

            file_found = head
            description = ''
            report = ArtifactHtmlReport('iOS Mail')
            report.start_artifact_report(report_folder, 'Emails', description)
            report.add_script()
            data_headers = ('Date Sent', 'Date Received', 'Address', 'Comment',
                            'Subject', 'Summary', 'Read?', 'Flagged?',
                            'Deleted', 'Mailbox')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'iOS Mail'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc("No iOS emails available")
Exemplo n.º 25
0
def get_knowCappshortcut(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC Location" + iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
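    # KnowledgeC stores Core Data timestamps as seconds since 2001-01-01;
    # adding 978307200 rebases them onto the Unix epoch for DATETIME().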

    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
			    WHEN "1" THEN "Sunday"
			    WHEN "2" THEN "Monday"
			    WHEN "3" THEN "Tuesday"
			    WHEN "4" THEN "Wednesday"
			    WHEN "5" THEN "Thursday"
			    WHEN "6" THEN "Friday"
			    WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID",	
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM ZOBJECT
		WHERE ZSTREAMNAME IS "/app/relevantShortcuts"
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        description = ''
        report = ArtifactHtmlReport(
            'KnowledgeC Application Relevant Shortcuts')
        report.start_artifact_report(report_folder, 'App Relevant Shortcuts',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Bundle ID', 'Day of the Week',
                        'GMT Offset', 'Entry Creation', 'UUID',
                        'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Application Relevant Shortcuts'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Application Relevant Shortcuts'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Exemplo n.º 26
0
def get_cloudkitParticipants(files_found, report_folder, seeker):

    user_dictionary = {}

    for file_found in files_found:
        file_found = str(file_found)

        # Can add a separate section for each file this information is found in.
        # This is for Apple Notes.
        if file_found.endswith('NoteStore.sqlite'):
            db = sqlite3.connect(file_found)
            cursor = db.cursor()
            cursor.execute('''
            SELECT Z_PK, ZSERVERSHAREDATA 
            FROM
            ZICCLOUDSYNCINGOBJECT
            WHERE
            ZSERVERSHAREDATA NOT NULL
            ''')

            all_rows = cursor.fetchall()
            for row in all_rows:

                filename = os.path.join(
                    report_folder,
                    'zserversharedata_' + str(row[0]) + '.bplist')
                output_file = open(filename, "wb")
                output_file.write(row[1])
                output_file.close()

                deserialized_plist = nd.deserialize_plist(io.BytesIO(row[1]))
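                # ZSERVERSHAREDATA deserializes into CloudKit share records;
                # each 'Participants' entry nests its identity details under
                # UserIdentity (record ID, lookup info, name components).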
                for item in deserialized_plist:
                    if 'Participants' in item:
                        for participant in item['Participants']:
                            record_id = participant['UserIdentity'][
                                'UserRecordID']['RecordName']
                            email_address = participant['UserIdentity'][
                                'LookupInfo']['EmailAddress']
                            phone_number = participant['UserIdentity'][
                                'LookupInfo']['PhoneNumber']
                            first_name = participant['UserIdentity'][
                                'NameComponents']['NS.nameComponentsPrivate'][
                                    'NS.givenName']
                            middle_name = participant['UserIdentity'][
                                'NameComponents']['NS.nameComponentsPrivate'][
                                    'NS.middleName']
                            last_name = participant['UserIdentity'][
                                'NameComponents']['NS.nameComponentsPrivate'][
                                    'NS.familyName']
                            name_prefix = participant['UserIdentity'][
                                'NameComponents']['NS.nameComponentsPrivate'][
                                    'NS.namePrefix']
                            name_suffix = participant['UserIdentity'][
                                'NameComponents']['NS.nameComponentsPrivate'][
                                    'NS.nameSuffix']
                            nickname = participant['UserIdentity'][
                                'NameComponents']['NS.nameComponentsPrivate'][
                                    'NS.nickname']

                            user_dictionary[record_id] = [
                                record_id, email_address, phone_number,
                                name_prefix, first_name, middle_name,
                                last_name, name_suffix, nickname
                            ]
            db.close()

    # Build the array after dealing with all the files
    user_list = list(user_dictionary.values())

    if len(user_list) > 0:
        description = 'CloudKit Participants - Cloudkit accounts participating in CloudKit shares.'
        report = ArtifactHtmlReport('Participants')
        report.start_artifact_report(report_folder, 'Participants',
                                     description)
        report.add_script()
        user_headers = ('Record ID', 'Email Address', 'Phone Number',
                        'Name Prefix', 'First Name', 'Middle Name',
                        'Last Name', 'Name Suffix', 'Nickname')
        report.write_artifact_data_table(user_headers,
                                         user_list,
                                         '',
                                         write_location=False)
        report.end_artifact_report()

        tsvname = 'Cloudkit Participants'
        tsv(report_folder, user_headers, user_list, tsvname)
    else:
        logfunc('No Cloudkit - Cloudkit Participants data available')
Exemplo n.º 27
0
def get_restoreLog(files_found, report_folder, seeker):

    data_list = []
    pattern = 'data = '
    pattern1 = '\"originalOSVersion\":\"'
    pattern2 = '\"currentOSVersion\":\"'
    pattern3 = '\"deviceModel\":\"'
    pattern4 = '\"eventTime\":\"'
    pattern5 = '\"batteryIsCharging\":'
    pattern6 = '\"deviceClass\":\"'
    pattern7 = '\"event\":\"'
    og_version_num = ''
    cur_version_num = ''
    originalOSBuild = ''
    currentOSBuild = ''
    # Initialize the remaining carved fields so a log line missing one of
    # the JSON keys cannot raise a NameError when the row is appended.
    timestamp_formatted = deviceModel = deviceClass = event = ''
    batteryIsCharging = ''

    OS_dict = {
        "15A372": "11.0",
        "15A402": "11.0.1",
        "15A403": "11.0.1",
        "15A8391": "11.0.1",
        "15A421": "11.0.2",
        "15A432": "11.0.3",
        "15B93": "11.1",
        "15B101": "11.1",
        "15B150": "11.1.1",
        "15B202": "11.1.2",
        "15C114": "11.2",
        "15C153": "11.2.1",
        "15C202": "11.2.2",
        "15D60": "11.2.5",
        "15D100": "11.2.6",
        "15E216": "11.3",
        "15E218": "11.3",
        "15E302": "11.3.1",
        "15F79": "11.4",
        "15G77": "11.4.1",
        "16A366": "12.0",
        "16A404": "12.0.1",
        "16A405": "12.0.1",
        "16B92": "12.1",
        "16B93": "12.1",
        "16B94": "12.1",
        "16C50": "12.1.1",
        "16C104": "12.1.2",
        "16D39": "12.1.3",
        "16D40": "12.1.3",
        "16D57": "12.1.4",
        "16E227": "12.2",
        "16F156": "12.3",
        "16F203": "12.3.1",
        "16F8202": "12.3.1",
        "16F250": "12.3.2",
        "16G77": "12.4",
        "16G102": "12.4.1",
        "16G114": "12.4.2",
        "16G130": "12.4.3",
        "16G140": "12.4.4",
        "16G161": "12.4.5",
        "16G183": "12.4.6",
        "16G192": "12.4.7",
        "16G201": "12.4.8",
        "16H5": "12.4.9",
        "16H20": "12.5",
        "16H22": "12.5.1",
        "16H30": "12.5.2",
        "16H41": "12.5.3",
        "16H50": "12.5.4",
        "16H62": "12.5.5",
        "17A577": "13.0",
        "17A844": "13.1",
        "17A854": "13.1.1",
        "17A860": "13.1.2",
        "17A861": "13.1.2",
        "17A878": "13.1.3",
        "17B84": "13.2",
        "17B90": "13.2.1",
        "17B102": "13.2.2",
        "17B111": "13.2.3",
        "17C54": "13.3",
        "17D50": "13.3.1",
        "17E255": "13.4",
        "17E8255": "13.4",
        "17E262": "13.4.1",
        "17E8258": "13.4.1",
        "17F75": "13.5",
        "17F80": "13.5.1",
        "17G68": "13.6",
        "17G80": "13.6.1",
        "17H35": "13.7",
        "18A373": "14.0",
        "18A393": "14.0.1",
        "18A8395": "14.1",
        "18B92": "14.2",
        "18B111": "14.2",
        "18B121": "14.2.1",
        "18C66": "14.3",
        "18D52": "14.4",
        "18D61": "14.4.1",
        "18D70": "14.4.2",
        "18E199": "14.5",
        "18E212": "14.5.1",
        "18F72": "14.6",
        "18G69": "14.7",
        "18G82": "14.7.1",
        "18H17": "14.8",
        "18H107": "14.8.1",
        "19A346": "15.0",
        "19A348": "15.0.1",
        "19A404": "15.0.2",
        "19B74": "15.1",
        "19B81": "15.1.1",
        "19C56": "15.2",
        "19C57": "15.2",
        "19C63": "15.2.1",
        "19D50": "15.3",
        "19D52": "15.3.1",
        "19E241": "15.4",
        "19E258": "15.4.1",
        "19F77": "15.5",
    }

    for file_found in files_found:
        file_found = str(file_found)
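        # Each matching log line embeds a JSON event record; instead of
        # parsing it as JSON, the fields are carved out by partitioning on
        # the quoted key patterns defined above.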

        with open(file_found, "r", encoding="utf-8") as f:
            data = f.readlines()
            for line in data:
                if pattern in line:

                    if pattern1 in line:

                        splitline1 = line.partition(pattern1)[2]
                        originalOSBuild = splitline1[:splitline1.find("\"")]
                        # Map the build number to its marketing version;
                        # unlisted builds fall back to "Unknown".
                        og_version_num = OS_dict.get(originalOSBuild,
                                                     "Unknown")

                        if pattern2 in line:
                            splitline2 = line.partition(pattern2)[2]
                            currentOSBuild = splitline2[:splitline2.find("\"")]
                            cur_version_num = OS_dict.get(
                                currentOSBuild, "Unknown")

                        if pattern3 in line:
                            splitline3 = line.partition(pattern3)[2]
                            deviceModel = splitline3[:splitline3.find("\"")]
                        else:
                            pass

                        if pattern4 in line:
                            splitline4 = line.partition(pattern4)[2]
                            eventTime = splitline4[:splitline4.find("\"")]
                            timestamp_formatted = datetime.datetime.fromtimestamp(
                                int(eventTime) /
                                1000).strftime('%Y-%m-%d %H:%M:%S')
                        else:
                            pass

                        if pattern5 in line:
                            splitline5 = line.partition(pattern5)[2]
                            batteryIsCharging = splitline5[:splitline5.find(","
                                                                            )]
                        else:
                            pass

                        if pattern6 in line:
                            splitline6 = line.partition(pattern6)[2]
                            deviceClass = splitline6[:splitline6.find("\"")]
                        else:
                            pass

                        if pattern7 in line:
                            splitline7 = line.partition(pattern7)[2]
                            event = splitline7[:splitline7.find("\"")]
                        else:
                            pass

                        data_list.append((timestamp_formatted, originalOSBuild,
                                          og_version_num, currentOSBuild,
                                          cur_version_num, event, deviceClass,
                                          deviceModel, batteryIsCharging))

                    else:
                        pass

    num_entries = len(data_list)
    if num_entries > 0:
        report = ArtifactHtmlReport('Mobile Software Update - Restore Log')
        report.start_artifact_report(report_folder,
                                     'Mobile Software Update - Restore Log')
        report.add_script()
        data_headers = ('Timestamp', 'Original OS Build',
                        'Original OS Version', 'Current OS Build',
                        'Current OS Version', 'Event', 'Device', 'Model',
                        'Battery Is Charging')

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Mobile Software Update - Restore Log'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Mobile Software Update - Restore Log'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Mobile Software Update - Restore Log data available')
Exemplo n.º 28
0
def get_knowCsafari(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
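    # KnowledgeC's /safari/history stream gained a page-title metadata
    # column in iOS 12, so the query and result layout differ by version.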
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute('''
		SELECT
				DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
				DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
				ZSTRUCTUREDMETADATA.Z_DKSAFARIHISTORYMETADATAKEY__TITLE AS "TITLE",
				ZOBJECT.ZVALUESTRING AS "URL", 
				ZSOURCE.ZBUNDLEID AS "BUNDLE ID",
				CASE ZOBJECT.ZSTARTDAYOFWEEK 
					WHEN "1" THEN "Sunday"
					WHEN "2" THEN "Monday"
					WHEN "3" THEN "Tuesday"
					WHEN "4" THEN "Wednesday"
					WHEN "5" THEN "Thursday"
					WHEN "6" THEN "Friday"
					WHEN "7" THEN "Saturday"
				END "DAY OF WEEK", 
				ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
				DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
				ZOBJECT.ZUUID AS "UUID", 
				ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
			FROM
				ZOBJECT 
				LEFT JOIN
					ZSTRUCTUREDMETADATA 
					ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
				LEFT JOIN
					ZSOURCE 
					ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
			WHERE
				ZSTREAMNAME IS "/safari/history"
			''')
    elif version.parse(iOSversion) == version.parse("11"):
        cursor = db.cursor()
        cursor.execute('''
		SELECT
				DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
				DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
				ZOBJECT.ZVALUESTRING AS "URL", 
				ZSOURCE.ZBUNDLEID AS "BUNDLE ID",
				CASE ZOBJECT.ZSTARTDAYOFWEEK 
					WHEN "1" THEN "Sunday"
					WHEN "2" THEN "Monday"
					WHEN "3" THEN "Tuesday"
					WHEN "4" THEN "Wednesday"
					WHEN "5" THEN "Thursday"
					WHEN "6" THEN "Friday"
					WHEN "7" THEN "Saturday"
				END "DAY OF WEEK", 
				ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
				DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
				ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
			FROM
				ZOBJECT 
				LEFT JOIN
					ZSTRUCTUREDMETADATA 
					ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
				LEFT JOIN
					ZSOURCE 
					ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
			WHERE
				ZSTREAMNAME IS "/safari/history"	
					''')
    else:
        logfunc("Unsupported version for KnowledgC Safari iOS " + iOSversion)
        return ()

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        if version.parse(iOSversion) >= version.parse("12"):

            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9]))

            report = ArtifactHtmlReport('KnowledgeC Safari Browsing')
            report.start_artifact_report(report_folder, 'Safari Browsing')
            report.add_script()
            data_headers = ('Start', 'End', 'Title', 'URL', 'Bundle ID',
                            'Day of Week', 'GMT Offset', 'Entry Creation',
                            'UUID', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Safari Browsing'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Safari Browsing'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7]))

            report = ArtifactHtmlReport('KnowledgeC Safari Browsing')
            report.start_artifact_report(report_folder, 'Safari Browsing')
            report.add_script()
            data_headers = ('Start', 'End', 'URL', 'Bundle ID', 'Day of Week',
                            'GMT Offset', 'Entry Creation', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Safari Browsing'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Safari Browsing'
            timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Exemplo n.º 29
0
def get_sbrowserDownloads(files_found, report_folder, seeker):
    
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
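    # Download timestamps follow the Chromium convention of microseconds
    # since 1601-01-01 UTC; the CASE expressions rebase them onto the Unix
    # epoch and pass through unset ("0") values unchanged.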
    cursor.execute('''
    SELECT
    tab_url,
    CASE
        start_time  
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(start_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "Start Time", 
    CASE
        end_time 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(end_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "End Time", 
    CASE
        last_access_time 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(last_access_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "Last Access Time", 
    target_path,
    state,
    opened,
    received_bytes,
    total_bytes
    FROM
    downloads
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Browser Downloads')
        report.start_artifact_report(report_folder, 'Browser Downloads')
        report.add_script()
        data_headers = ('URL', 'Start Time', 'End Time', 'Last Access Time',
                        'Target Path', 'State', 'Opened?', 'Received Bytes',
                        'Total Bytes')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Browser download data available')
    
    db.close()
    return
Exemplo n.º 30
0
def get_knowCusage(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
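    # Usage duration is computed in SQL: ZENDDATE - ZSTARTDATE yields
    # seconds, and dividing by 60.00 forces a floating-point minutes value.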

    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",  
			ZSOURCE.ZDEVICEID AS "DEVICE ID (HARDWARE UUID)", 
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
			ZOBJECT.ZUUID AS "UUID",
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/app/usage" 
	""")
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                          row[6], row[7], row[8], row[9], row[10]))

    description = ''
    report = ArtifactHtmlReport('KnowledgeC App Usage')
    report.start_artifact_report(report_folder, 'KnowledgeC App Usage',
                                 description)
    report.add_script()
    data_headers = ('Start', 'End', 'Bundle ID', 'Usage in Seconds',
                    'Usage in Minutes', 'Device ID', 'Day of the Week',
                    'GMT Offset', 'Entry Creation', 'UUID', 'Zobject Table ID')
    report.write_artifact_data_table(data_headers,
                                     data_list,
                                     file_found,
                                     html_escape=False)
    report.end_artifact_report()

    tsvname = 'KnowledgeC App Usage'
    tsv(report_folder, data_headers, data_list, tsvname)