Example #1
def get_restoreLog(files_found, report_folder, seeker):

    data_list = []
    pattern = 'data = '
    pattern1 = '\"originalOSVersion\":\"'
    pattern2 = '\"currentOSVersion\":\"'
    pattern3 = '\"deviceModel\":\"'
    pattern4 = '\"eventTime\":\"'
    pattern5 = '\"batteryIsCharging\":'
    pattern6 = '\"deviceClass\":\"'
    pattern7 = '\"event\":\"'
    og_version_num = ''
    cur_version_num = ''
    originalOSBuild = ''
    currentOSBuild = ''
    # Initialize the remaining fields so a line missing a sub-pattern
    # cannot raise NameError on the first append.
    timestamp_formatted = deviceModel = deviceClass = event = batteryIsCharging = ''

    OS_dict = {
        "15A372": "11.0",
        "15A402": "11.0.1",
        "15A403": "11.0.1",
        "15A8391": "11.0.1",
        "15A421": "11.0.2",
        "15A432": "11.0.3",
        "15B93": "11.1",
        "15B101": "11.1",
        "15B150": "11.1.1",
        "15B202": "11.1.2",
        "15C114": "11.2",
        "15C153": "11.2.1",
        "15C202": "11.2.2",
        "15D60": "11.2.5",
        "15D100": "11.2.6",
        "15E216": "11.3",
        "15E218": "11.3",
        "15E302": "11.3.1",
        "15F79": "11.4",
        "15G77": "11.4.1",
        "16A366": "12.0",
        "16A404": "12.0.1",
        "16A405": "12.0.1",
        "16B92": "12.1",
        "16B93": "12.1",
        "16B94": "12.1",
        "16C50": "12.1.1",
        "16C104": "12.1.2",
        "16D39": "12.1.3",
        "16D40": "12.1.3",
        "16D57": "12.1.4",
        "16E227": "12.2",
        "16F156": "12.3",
        "16F203": "12.3.1",
        "16F8202": "12.3.1",
        "16F250": "12.3.2",
        "16G77": "12.4",
        "16G102": "12.4.1",
        "16G114": "12.4.2",
        "16G130": "12.4.3",
        "16G140": "12.4.4",
        "16G161": "12.4.5",
        "16G183": "12.4.6",
        "16G192": "12.4.7",
        "16G201": "12.4.8",
        "16H5": "12.4.9",
        "16H20": "12.5",
        "16H22": "12.5.1",
        "16H30": "12.5.2",
        "16H41": "12.5.3",
        "16H50": "12.5.4",
        "16H62": "12.5.5",
        "17A577": "13.0",
        "17A844": "13.1",
        "17A854": "13.1.1",
        "17A860": "13.1.2",
        "17A861": "13.1.2",
        "17A878": "13.1.3",
        "17B84": "13.2",
        "17B90": "13.2.1",
        "17B102": "13.2.2",
        "17B111": "13.2.3",
        "17C54": "13.3",
        "17D50": "13.3.1",
        "17E255": "13.4",
        "17E8255": "13.4",
        "17E262": "13.4.1",
        "17E8258": "13.4.1",
        "17F75": "13.5",
        "17F80": "13.5.1",
        "17G68": "13.6",
        "17G80": "13.6.1",
        "17H35": "13.7",
        "18A373": "14.0",
        "18A393": "14.0.1",
        "18A8395": "14.1",
        "18B92": "14.2",
        "18B111": "14.2",
        "18B121": "14.2.1",
        "18C66": "14.3",
        "18D52": "14.4",
        "18D61": "14.4.1",
        "18D70": "14.4.2",
        "18E199": "14.5",
        "18E212": "14.5.1",
        "18F72": "14.6",
        "18G69": "14.7",
        "18G82": "14.7.1",
        "18H17": "14.8",
        "18H107": "14.8.1",
        "19A346": "15.0",
        "19A348": "15.0.1",
        "19A404": "15.0.2",
        "19B74": "15.1",
        "19B81": "15.1.1",
        "19C56": "15.2",
        "19C57": "15.2",
        "19C63": "15.2.1",
        "19D50": "15.3",
        "19D52": "15.3.1",
        "19E241": "15.4",
        "19E258": "15.4.1",
        "19F77": "15.5",
    }

    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, "r", encoding="utf-8") as f:
            data = f.readlines()
            for line in data:
                if pattern in line:

                    if pattern1 in line:

                        splitline1 = line.partition(pattern1)[2]
                        originalOSBuild = splitline1[:splitline1.find("\"")]
                        og_version_num = OS_dict.get(originalOSBuild, "Unknown")

                        if pattern2 in line:
                            splitline2 = line.partition(pattern2)[2]
                            currentOSBuild = splitline2[:splitline2.find("\"")]
                            cur_version_num = OS_dict.get(currentOSBuild, "Unknown")

                        if pattern3 in line:
                            splitline3 = line.partition(pattern3)[2]
                            deviceModel = splitline3[:splitline3.find("\"")]

                        if pattern4 in line:
                            splitline4 = line.partition(pattern4)[2]
                            eventTime = splitline4[:splitline4.find("\"")]
                            timestamp_formatted = datetime.datetime.fromtimestamp(
                                int(eventTime) / 1000).strftime('%Y-%m-%d %H:%M:%S')

                        if pattern5 in line:
                            splitline5 = line.partition(pattern5)[2]
                            batteryIsCharging = splitline5[:splitline5.find(",")]

                        if pattern6 in line:
                            splitline6 = line.partition(pattern6)[2]
                            deviceClass = splitline6[:splitline6.find("\"")]

                        if pattern7 in line:
                            splitline7 = line.partition(pattern7)[2]
                            event = splitline7[:splitline7.find("\"")]

                        data_list.append((timestamp_formatted, originalOSBuild,
                                          og_version_num, currentOSBuild,
                                          cur_version_num, event, deviceClass,
                                          deviceModel, batteryIsCharging))

    num_entries = len(data_list)
    if num_entries > 0:
        report = ArtifactHtmlReport('Mobile Software Update - Restore Log')
        report.start_artifact_report(report_folder,
                                     'Mobile Software Update - Restore Log')
        report.add_script()
        data_headers = ('Timestamp', 'Original OS Build',
                        'Original OS Version', 'Current OS Build',
                        'Current OS Version', 'Event', 'Device', 'Model',
                        'Battery Is Charging')

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Mobile Software Update - Restore Log'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Mobile Software Update - Restore Log'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Mobile Software Update - Restore Log data available')
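Since the payload that follows the 'data = ' marker is a JSON object, the chain of partition() calls above can also be replaced by decoding it directly. A minimal sketch, assuming each matching line carries well-formed JSON after the marker (json and datetime are standard library; parse_restore_line is a hypothetical helper, not part of the framework):

import datetime
import json

def parse_restore_line(line, marker='data = '):
    # Decode the JSON payload that follows the marker.
    payload = json.loads(line.partition(marker)[2])
    event_time = payload.get('eventTime')
    timestamp = (datetime.datetime.fromtimestamp(int(event_time) / 1000)
                 .strftime('%Y-%m-%d %H:%M:%S') if event_time else '')
    # Returns the raw fields; build-to-version mapping would still use OS_dict.
    return (timestamp,
            payload.get('originalOSVersion', ''),
            payload.get('currentOSVersion', ''),
            payload.get('deviceModel', ''),
            payload.get('deviceClass', ''),
            payload.get('event', ''),
            payload.get('batteryIsCharging', ''))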
Example #2
def get_locationDsteps(files_found, report_folder, seeker):
	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	
	iOSversion = scripts.artifacts.artGlobals.versionf
	if version.parse(iOSversion) >= version.parse("10"):
		cursor = db.cursor()
		cursor.execute(
		"""
		SELECT 
		DATETIME(STARTTIME + 978307200, 'UNIXEPOCH') AS "START TIME",
		TIMESTAMP AS "MOVEMENT TIME",
		COUNT AS "COUNT", 
		DISTANCE AS "DISTANCE", 
		RAWDISTANCE AS "RAWDISTANCE",
		FLOORSASCENDED AS "FLOORS ASCENDED",
		FLOORSDESCENDED AS "FLOORS DESCENDED",
		PACE AS "PACE",
		ACTIVETIME AS "ACTIVE TIME",
		FIRSTSTEPTIME AS "FIRST STEP TIME",
		PUSHCOUNT AS "PUSH COUNT",
		WORKOUTTYPE AS "WORKOUT TYPE",
		STEPCOUNTHISTORY.ID AS "STEPCOUNTHISTORY TABLE ID"
		FROM STEPCOUNTHISTORY
		""")

		all_rows = cursor.fetchall()
		usageentries = len(all_rows)
		data_list = []    
		if usageentries > 0:
			for row in all_rows:
				data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12]))
		
			description = ''
			report = ArtifactHtmlReport('LocationD Steps')
			report.start_artifact_report(report_folder, 'Steps', description)
			report.add_script()
			data_headers = ('Start Time','Movement Time','Count','Distance','Raw Distance','Floors Ascended','Floors Descended','Pace','Active Time','First Step Time','Push Count','Workout Type','Table ID')     
			report.write_artifact_data_table(data_headers, data_list, file_found)
			report.end_artifact_report()
			
			tsvname = 'LocationD Steps'
			tsv(report_folder, data_headers, data_list, tsvname)
			
			tlactivity = 'LocationD Steps'
			timeline(report_folder, tlactivity, data_list, data_headers)
		else:
			logfunc('No data available for Steps')
			
	elif version.parse(iOSversion) >= version.parse("9"):
		cursor = db.cursor()
		cursor.execute(
		"""
		SELECT 
		DATETIME(STARTTIME + 978307200, 'UNIXEPOCH') AS "START TIME",
		TIMESTAMP AS "MOVEMENT TIME",
		COUNT AS "COUNT", 
		DISTANCE AS "DISTANCE", 
		RAWDISTANCE AS "RAWDISTANCE",
		FLOORSASCENDED AS "FLOORS ASCENDED",
		FLOORSDESCENDED AS "FLOORS DESCENDED",
		PACE AS "PACE",
		ACTIVETIME AS "ACTIVE TIME",
		STEPCOUNTHISTORY.ID AS "STEPCOUNTHISTORY TABLE ID"
		FROM STEPCOUNTHISTORY
		""")

		all_rows = cursor.fetchall()
		usageentries = len(all_rows)
		data_list = []    
		if usageentries > 0:
			for row in all_rows:
				data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9]))
		
			description = ''
			report = ArtifactHtmlReport('LocationD Steps')
			report.start_artifact_report(report_folder, 'Steps', description)
			report.add_script()
			data_headers = ('Start Time','Movement Time','Count','Distance','Raw Distance','Floors Ascended','Floors Descended','Pace','Active Time','Table ID')     
			report.write_artifact_data_table(data_headers, data_list, file_found)
			report.end_artifact_report()
			
			tsvname = 'LocationD Steps'
			tsv(report_folder, data_headers, data_list, tsvname)
		else:
			logfunc('No data available for Steps')
	
	elif version.parse(iOSversion) >= version.parse("8"):
		cursor = db.cursor()
		cursor.execute(
		"""
		SELECT 
		DATETIME(STARTTIME + 978307200, 'UNIXEPOCH') AS "START TIME",
		TIMESTAMP AS "MOVEMENT TIME",
		COUNT AS "COUNT", 
		DISTANCE AS "DISTANCE", 
		RAWDISTANCE AS "RAWDISTANCE",
		FLOORSASCENDED AS "FLOORS ASCENDED",
		FLOORSDESCENDED AS "FLOORS DESCENDED",
		STEPCOUNTHISTORY.ID AS "STEPCOUNTHISTORY TABLE ID"
		FROM STEPCOUNTHISTORY		
		""")

		all_rows = cursor.fetchall()
		usageentries = len(all_rows)
		data_list = []    
		if usageentries > 0:
			for row in all_rows:
				data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7]))
		
			description = ''
			report = ArtifactHtmlReport('LocationD Steps')
			report.start_artifact_report(report_folder, 'Steps', description)
			report.add_script()
			data_headers = ('Start Time','Movement Time','Count','Distance','Raw Distance','Floors Ascended','Floors Descended','Table ID')     
			report.write_artifact_data_table(data_headers, data_list, file_found)
			report.end_artifact_report()
			
			tsvname = 'LocationD Steps'
			tsv(report_folder, data_headers, data_list, tsvname)
		else:
			logfunc('No data available for Steps')

	else:
		logfunc('No data available for Steps')
	
	db.close()
	return 
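The recurring '+ 978307200' in these queries converts Core Data timestamps (seconds since 2001-01-01 UTC) to the Unix epoch before SQLite's DATETIME(..., 'UNIXEPOCH') renders them. For reference, the same conversion as a standalone Python helper:

import datetime

CORE_DATA_EPOCH_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 UTC

def core_data_to_datetime(seconds):
    # Convert a Core Data (Mac absolute time) value to a UTC datetime.
    return datetime.datetime.utcfromtimestamp(seconds + CORE_DATA_EPOCH_OFFSET)

# core_data_to_datetime(0) == datetime.datetime(2001, 1, 1, 0, 0)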
	
Example #3
def get_fitbitSocial(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    OWNING_USER_ID,
    ENCODED_ID,
    DISPLAY_NAME,
    AVATAR_URL,
    FRIEND,
    CHILD
    FROM FRIEND
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Fitbit Friends')
        report.start_artifact_report(report_folder, 'Fitbit Friends')
        report.add_script()
        data_headers = ('Owning UserID', 'Encoded ID', 'Display Name',
                        'Avatar URL', 'Friend', 'Child')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Fitbit Friends'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Fitbit Friend data available')

    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime("LAST_UPDATED"/1000, 'unixepoch'),
    DISPLAY_NAME,
    FULL_NAME,
    ABOUT_ME,
    AVATAR_URL,
    COVER_PHOTO_URL,
    CITY,
    STATE,
    COUNTRY,
    datetime("JOINED_DATE"/1000, 'unixepoch'),
    datetime("DATE_OF_BIRTH"/1000, 'unixepoch'),
    HEIGHT,
    WEIGHT,
    GENDER,
    COACH
    FROM USER_PROFILE
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Fitbit User Profile')
        report.start_artifact_report(report_folder, 'Fitbit User Profile')
        report.add_script()
        data_headers = ('Last Updated', 'Display Name', 'Full Name',
                        'About Me', 'Avatar URL', 'Cover Photo URL', 'City',
                        'State', 'Country', 'Joined Date', 'Date of Birth',
                        'Height', 'Weight', 'Gender', 'Coach')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Fitbit User Profile'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Fitbit User Profile'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Fitbit User Profile data available')

    db.close()
Example #4
def get_knowCuserwaking(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC User Waking Event " +
                iOSversion)
        return

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/system/userWakingEvent" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC User Waking Event')
        report.start_artifact_report(report_folder, 'User Waking Event',
                                     description)
        report.add_script()
        data_headers = ('Start', 'End', 'Day of Week', 'GMT Offset',
                        'Entry Creation', 'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC User Waking Event'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC User Waking Event'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Example #5
def get_package_info(files_found, report_folder, seeker, wrap_text):
    packages = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(
                file_found):  # skip folders (there shouldn't be any)
            continue

        file_name = os.path.basename(file_found)
        if (checkabx(file_found)):
            multi_root = False
            tree = abxread(file_found, multi_root)
            xlmstring = (etree.tostring(tree.getroot()).decode())
            doc = xmltodict.parse(xlmstring)
        else:
            with open(file_found) as fd:
                doc = xmltodict.parse(fd.read())

        package_dict = doc.get('packages', {}).get('package', {})
        for package in package_dict:
            name = package.get('@name', '')
            ft = ReadUnixTimeMs(package.get('@ft', None))
            it = ReadUnixTimeMs(package.get('@it', None))
            ut = ReadUnixTimeMs(package.get('@ut', None))
            install_originator = package.get('@installOriginator', '')
            installer = package.get('@installer', '')
            code_path = package.get('@codePath', '')
            public_flags = hex(
                int(package.get('@publicFlags', 0)) & (2**32 - 1))
            private_flags = hex(
                int(package.get('@privateFlags', 0)) & (2**32 - 1))
            package = Package(name, ft, it, ut, install_originator, installer,
                              code_path, public_flags, private_flags)
            packages.append(package)

        if len(packages):
            break

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)
    entries = len(packages)
    if entries > 0:
        description = "All packages (user installed, oem installed and system) appear here. Many of these are not user apps"
        report = ArtifactHtmlReport('Packages')
        report.start_artifact_report(report_folder, 'Packages', description)
        report.add_script()
        data_headers = ('ft', 'Name', 'Install Time', 'Update Time',
                        'Install Originator', 'Installer', 'Code Path',
                        'Public Flags', 'Private Flags')
        data_list = []
        for p in packages:
            data_list.append((p.ft, p.name, p.install_time, p.update_time,
                              p.install_originator, p.installer, p.code_path,
                              p.public_flags, p.private_flags))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Packages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Packages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No package data available')
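The '& (2**32 - 1)' mask in get_package_info normalizes flag values that packages.xml stores as signed 32-bit integers, so negative values render as their unsigned hex form. A quick illustration of why the mask is needed:

# publicFlags/privateFlags can be negative in the XML (signed 32-bit);
# masking with 2**32 - 1 recovers the unsigned bit pattern.
flags = -2147483648               # INT32_MIN as it may appear in packages.xml
print(hex(flags & (2**32 - 1)))   # -> 0x80000000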
Example #6
def get_routineDLocationsLocal(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for RoutineD Locations Local.sqlite on iOS " + iOSversion)
    else:
        for file_found in files_found:
            file_found = str(file_found)
            
            if file_found.endswith('Local.sqlite'):
                break
                
        db = sqlite3.connect(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE + 978307200, 'UNIXEPOCH') AS "ENTRY",
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE + 978307200, 'UNIXEPOCH') AS "EXIT",
            (ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE-ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE)/60.00 AS "ENTRY TIME (MINUTES)",
            ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
            ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCONFIDENCE AS "CONFIDENCE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONUNCERTAINTY AS "LOCATION UNCERTAINTY",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZDATAPOINTCOUNT AS "DATA POINT COUNT",
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "PLACE CREATION DATE",
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLATITUDE AS "VISIT LATITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLONGITUDE AS "VISIT LONGITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTVISITMO TABLE ID" 
        FROM
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO 
            LEFT JOIN
                ZRTLEARNEDLOCATIONOFINTERESTMO 
                ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONOFINTEREST
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []    
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13]))
            
            description = 'Significant Locations - Location of Interest Entry (Historical)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD Locations Entry', description)
            report.add_script()
            data_headers = ('Entry','Exit','Entry Time (Minutes)','Coordinates','Latitude', 'Longitude','Confidence','Location Uncertainty','Data Point Count','Place Creation Date','Expiration','Visit Latitude', 'Visit Longitude', 'Table ID')     
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = 'RoutineD Locations Entry'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'RoutineD Locations Entry'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Significant Locations Entry data available')
            
        
        cursor.execute('''
        SELECT
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE + 978307200, 'UNIXEPOCH') AS "ENTRY",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE + 978307200, 'UNIXEPOCH') AS "EXIT",
               (ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE-ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE)/60.00 AS "EXIT TIME (MINUTES)",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCONFIDENCE AS "CONFIDENCE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONUNCERTAINTY AS "LOCATION UNCERTAINTY",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZDATAPOINTCOUNT AS "DATA POINT COUNT",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "PLACE CREATION DATE",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLATITUDE AS "VISIT LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLONGITUDE AS "VISIT LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTVISITMO TABLE ID" 
            FROM
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO 
               LEFT JOIN
                  ZRTLEARNEDLOCATIONOFINTERESTMO 
                  ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONOFINTEREST
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13]))
            
            description = 'Significant Locations - Location of Interest Exit (Historical)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD Locations Exit', description)
            report.add_script()
            data_headers = ('Entry','Exit','Exit Time (Minutes)','Coordinates','Latitude', 'Longitude','Confidence','Location Uncertainty','Data Point Count','Place Creation Date','Expiration','Visit Latitude', 'Visit Longitude', 'Table ID')     
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = 'RoutineD Locations Exit'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'RoutineD Locations Exit'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Significant Locations Exit data available')
            
        if version.parse(iOSversion) >= version.parse("12"):
          cursor.execute('''
          SELECT
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE + 978307200, 'UNIXEPOCH') AS "START",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE + 978307200, 'UNIXEPOCH') AS "STOP",
               (ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE-ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE)/60.00 AS "TRANSITION TIME (MINUTES)",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "CREATION DATE",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO TABLE ID" 
            FROM
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO 
               LEFT JOIN
                  ZRTLEARNEDLOCATIONOFINTERESTMO 
                  ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZLOCATIONOFINTEREST
          ''')

          all_rows = cursor.fetchall()
          usageentries = len(all_rows)
          data_list = []    
          if usageentries > 0:
              for row in all_rows:
                  data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
              
              description = 'Significant Locations - Location of Interest Transition Start (Historical)'
              report = ArtifactHtmlReport('Locations')
              report.start_artifact_report(report_folder, 'RoutineD Transition Start', description)
              report.add_script()
              data_headers = ('Start','Stop','Transition Time (Minutes)','Coordinates','Creation Date', 'Expiration','Latitude','Longitude', 'Table ID' )     
              report.write_artifact_data_table(data_headers, data_list, file_found)
              report.end_artifact_report()
              
              tsvname = 'RoutineD Transition Start'
              tsv(report_folder, data_headers, data_list, tsvname)
              
              tlactivity = 'RoutineD Transition Start'
              timeline(report_folder, tlactivity, data_list, data_headers)

          else:
              logfunc('No RoutineD Significant Locations Transition Start data available')

        if (version.parse(iOSversion) >= version.parse("11")) and (version.parse(iOSversion) < version.parse("12")):
          cursor.execute('''
          SELECT
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE + 978307200, 'UNIXEPOCH') AS "START",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE + 978307200, 'UNIXEPOCH') AS "STOP",
               (ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE-ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE)/60.00 AS "TRANSITION TIME (MINUTES)",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "CREATION DATE",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO TABLE ID" 
            FROM
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO 
               LEFT JOIN
                  ZRTLEARNEDLOCATIONOFINTERESTMO 
                  ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZLOCATIONOFINTEREST
          ''')

          all_rows = cursor.fetchall()
          usageentries = len(all_rows)
          data_list = []    
          if usageentries > 0:
              for row in all_rows:
                  data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
              
              description = 'Significant Locations - Location of Interest Transition Stop (Historical)'
              report = ArtifactHtmlReport('Locations')
              report.start_artifact_report(report_folder, 'RoutineD Transition Stop', description)
              report.add_script()
              data_headers = ('Start','Stop','Transition Time (Minutes)','Coordinates','Creation Date', 'Expiration','Latitude','Longitude', 'Table ID' )     
              report.write_artifact_data_table(data_headers, data_list, file_found)
              report.end_artifact_report()
              
              tsvname = 'RoutineD Transition Stop'
              tsv(report_folder, data_headers, data_list, tsvname)
              
              tlactivity = 'RoutineD Transition Stop'
              timeline(report_folder, tlactivity, data_list, data_headers)

          else:
              logfunc('No RoutineD Significant Locations Transition Stop data available')
Example #7
def get_Viber(files_found, report_folder, seeker, wrap_text):
    
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('_messages'):
            viber_messages_db = file_found

        if file_found.endswith('_data'):
            viber_data_db = file_found

    db = open_sqlite_db_readonly(viber_data_db)
    cursor = db.cursor()
    try:
        cursor.execute('''
        SELECT canonized_number, case type when 2 then "Outgoing" else "Incoming" end AS direction, 
               duration as duration_in_seconds, date AS start_time, 
               case viber_call_type when 1 then "Audio Call" when 4 then "Video Call" else "Unknown" end AS call_type
          FROM calls 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except:
        usageentries = 0
        
    if usageentries > 0:
        report = ArtifactHtmlReport('Viber - call logs')
        report.start_artifact_report(report_folder, 'Viber - call logs')
        report.add_script()
        data_headers = ('canonized_number', 'call_direction', 'duration_in_seconds', 'start_time', 'call_type')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = f'Viber - call logs'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Viber - call logs'
        timeline(report_folder, tlactivity, data_list, data_headers)
        
    else:
        logfunc('No Viber Call Logs found')
        
    try:        
        cursor.execute('''
        SELECT C.display_name, coalesce(D.data2, D.data1, D.data3) as phone_number 
          FROM phonebookcontact AS C JOIN phonebookdata AS D ON C._id = D.contact_id
        ''')
        
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except:
        usageentries = 0
        
    if usageentries > 0:
        report = ArtifactHtmlReport('Viber - Contacts')
        report.start_artifact_report(report_folder, 'Viber - Contacts')
        report.add_script()
        data_headers = ('display_name', 'phone_number')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1]))
            
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = f'Viber - Contacts'
        tsv(report_folder, data_headers, data_list, tsvname)
        
    else:
        logfunc('No Viber Contacts data available')

    db.close()

    db = open_sqlite_db_readonly(viber_messages_db)
    cursor = db.cursor()
    try:
        cursor.execute('''
                     SELECT convo_participants.from_number AS from_number, 
                            convo_participants.recipients  AS recipients, 
                            M.conversation_id              AS thread_id, 
                            M.body                         AS msg_content, 
                            case M.send_type when 1 then "Outgoing" else "Incoming" end AS direction, 
                            M.msg_date                     AS msg_date, 
                            case M.unread when 0 then "Read" else "Unread" end AS read_status,
                            M.extra_uri                    AS file_attachment                            
                     FROM   (SELECT *, 
                                    group_concat(TO_RESULT.number) AS recipients 
                             FROM   (SELECT P._id     AS FROM_ID, 
                                            P.conversation_id, 
                                            PI.number AS FROM_NUMBER 
                                     FROM   participants AS P 
                                            JOIN participants_info AS PI 
                                              ON P.participant_info_id = PI._id) AS FROM_RESULT 
                                    JOIN (SELECT P._id AS TO_ID, 
                                                 P.conversation_id, 
                                                 PI.number 
                                          FROM   participants AS P 
                                                 JOIN participants_info AS PI 
                                                   ON P.participant_info_id = PI._id) AS TO_RESULT 
                                      ON FROM_RESULT.from_id != TO_RESULT.to_id 
                                         AND FROM_RESULT.conversation_id = TO_RESULT.conversation_id 
                             GROUP  BY FROM_RESULT.from_id) AS convo_participants 
                            JOIN messages AS M 
                              ON M.participant_id = convo_participants.from_id 
                                 AND M.conversation_id = convo_participants.conversation_id
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except:
        usageentries = 0
        
    if usageentries > 0:
        report = ArtifactHtmlReport('Viber - Messages')
        report.start_artifact_report(report_folder, 'Viber - Messages')
        report.add_script()
        data_headers = ('from_number', 'recipients', 'thread_id', 'msg_content', 'direction', 'msg_date', 'read_status', 'file_attachment')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = f'Viber - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Viber - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
        
    else:
        logfunc('No Viber Messages found')

    db.close()
    
    return
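The nested SELECT in the Viber message query pairs each participant with every other participant of the same conversation and collapses those partners with group_concat, so each message row carries both a sender number and a recipient list. A self-contained illustration of that self-join pattern (toy table, not the actual Viber schema):

import sqlite3

con = sqlite3.connect(':memory:')
con.executescript('''
    CREATE TABLE members (convo INTEGER, number TEXT);
    INSERT INTO members VALUES (1, '+111'), (1, '+222'), (1, '+333');
''')
# For every member, concatenate all *other* members of the same conversation.
for row in con.execute('''
    SELECT a.number, group_concat(b.number) AS recipients
    FROM members AS a JOIN members AS b
      ON a.convo = b.convo AND a.number != b.number
    GROUP BY a.number
'''):
    print(row)  # e.g. ('+111', '+222,+333')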
Example #8
def get_calllog(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(date /1000, 'unixepoch') as date,
    CASE
        WHEN phone_account_address is NULL THEN ' '
        ELSE phone_account_address
        end as phone_account_address,
    number,
    CASE
        WHEN type = 1 THEN  'Incoming'
        WHEN type = 2 THEN  'Outgoing'
        WHEN type = 3 THEN  'Missed'
        WHEN type = 4 THEN  'Voicemail'
        WHEN type = 5 THEN  'Rejected'
        WHEN type = 6 THEN  'Blocked'
        WHEN type = 7 THEN  'Answered Externally'
        ELSE 'Unknown'
        end as types,
    duration,
    CASE
        WHEN geocoded_location is NULL THEN ' '
        ELSE geocoded_location
        end as geocoded_location,
    countryiso,
    CASE
        WHEN _data is NULL THEN ' '
        ELSE _data
        END as _data,
    CASE
        WHEN mime_type is NULL THEN ' '
        ELSE mime_type
        END as mime_type,
    CASE
        WHEN transcription is NULL THEN ' '
        ELSE transcription
        END as transcription,
    deleted
    FROM
    calls
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Call logs')
        report.start_artifact_report(report_folder, 'Call logs')
        report.add_script()
        data_headers = ('Call Date', 'Phone Account Address', 'Partner',
                        'Type', 'Duration in Secs', 'Partner Location',
                        'Country ISO', 'Data', 'Mime Type', 'Transcription',
                        'Deleted')
        data_list = []
        for row in all_rows:
            # Setup icons for call type
            call_type = row[3]
            if call_type == 'Incoming':
                call_type_html = call_type + ' <i data-feather="phone-incoming" stroke="green"></i>'
            elif call_type == 'Outgoing':
                call_type_html = call_type + ' <i data-feather="phone-outgoing" stroke="green"></i>'
            elif call_type == 'Missed':
                call_type_html = call_type + ' <i data-feather="phone-missed" stroke="red"></i>'
            elif call_type == 'Voicemail':
                call_type_html = call_type + ' <i data-feather="voicemail" stroke="brown"></i>'
            elif call_type == 'Rejected':
                call_type_html = call_type + ' <i data-feather="x" stroke="red"></i>'
            elif call_type == 'Blocked':
                call_type_html = call_type + ' <i data-feather="phone-off" stroke="red"></i>'
            elif call_type == 'Answered Externally':
                call_type_html = call_type + ' <i data-feather="phone-forwarded"></i>'
            else:
                call_type_html = call_type

            data_list.append(
                (row[0], row[1], row[2], call_type_html, str(row[4]), row[5],
                 row[6], row[7], row[8], row[9], str(row[10])))

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = f'Call Logs'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Call Logs'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Call Log data available')

    db.close()
    return
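The CASE expression above mirrors the call type constants of Android's android.provider.CallLog.Calls (1 = incoming through 7 = answered externally). The same mapping as a plain Python dict, if the translation ever needs to happen outside SQL (a sketch; CALL_TYPES is not part of the framework):

CALL_TYPES = {
    1: 'Incoming', 2: 'Outgoing', 3: 'Missed', 4: 'Voicemail',
    5: 'Rejected', 6: 'Blocked', 7: 'Answered Externally',
}

def call_type_name(type_code):
    # Fall back to 'Unknown' exactly like the SQL ELSE branch.
    return CALL_TYPES.get(type_code, 'Unknown')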
Example #9
def get_chromeAutofill(files_found, report_folder, seeker, wrap_text):
    
    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(file_found) == 'Web Data': # skip -journal and other files
            continue
        browser_name = get_browser_name(file_found)
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif file_found.find('.magisk') >= 0 and file_found.find('mirror') >= 0:
            continue # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()

        columns = [i[1] for i in cursor.execute('PRAGMA table_info(autofill)')]
        if 'date_created' in columns:
            cursor.execute(f'''
            select
                datetime(date_created, 'unixepoch'),
                name,
                value,
                datetime(date_last_used, 'unixepoch'),
                count
            from autofill
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            if usageentries > 0:
                report = ArtifactHtmlReport(f'{browser_name} - Autofill - Entries')
                #check for existing and get next name for report file, so report from another file does not get overwritten
                report_path = os.path.join(report_folder, f'{browser_name} - Autofill - Entries.temphtml')
                report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
                report.start_artifact_report(report_folder, os.path.basename(report_path))
                report.add_script()
                data_headers = ('Date Created','Field','Value','Date Last Used','Count')
                data_list = []
                for row in all_rows:
                    data_list.append((row[0],row[1],row[2],row[3],row[4]))

                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()
                
                tsvname = f'{browser_name} - Autofill - Entries'
                tsv(report_folder, data_headers, data_list, tsvname)
                
                tlactivity = f'{browser_name} - Autofill - Entries'
                timeline(report_folder, tlactivity, data_list, data_headers)
            else:
                logfunc(f'No {browser_name} - Autofill - Entries data available')
                
        else:
            cursor.execute(f'''
            select
                datetime(autofill_dates.date_created, 'unixepoch'),
                autofill.name,
                autofill.value,
                autofill.count
            from autofill
            join autofill_dates on autofill_dates.pair_id = autofill.pair_id
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            if usageentries > 0:
                report = ArtifactHtmlReport(f'{browser_name} - Autofill - Entries')
                #check for existing and get next name for report file, so report from another file does not get overwritten
                report_path = os.path.join(report_folder, f'{browser_name} - Autofill - Entries.temphtml')
                report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
                report.start_artifact_report(report_folder, os.path.basename(report_path))
                report.add_script()
                data_headers = ('Date Created','Field','Value','Count')
                data_list = []
                for row in all_rows:
                    data_list.append((row[0],row[1],row[2],row[3]))

                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()
                
                tsvname = f'{browser_name} - Autofill - Entries'
                tsv(report_folder, data_headers, data_list, tsvname)
                
                tlactivity = f'{browser_name} - Autofill - Entries'
                timeline(report_folder, tlactivity, data_list, data_headers)
            else:
                logfunc(f'No {browser_name} - Autofill - Entries data available')
        
        cursor.execute(f'''
        select
            datetime(date_modified, 'unixepoch'),
            autofill_profiles.guid,
            autofill_profile_names.first_name,
            autofill_profile_names.middle_name,
            autofill_profile_names.last_name,
            autofill_profile_emails.email,
            autofill_profile_phones.number,
            autofill_profiles.company_name,
            autofill_profiles.street_address,
            autofill_profiles.city,
            autofill_profiles.state,
            autofill_profiles.zipcode,
            datetime(use_date, 'unixepoch'),
            autofill_profiles.use_count
        from autofill_profiles
        inner join autofill_profile_emails ON autofill_profile_emails.guid = autofill_profiles.guid
        inner join autofill_profile_phones ON autofill_profiles.guid = autofill_profile_phones.guid
        inner join autofill_profile_names ON autofill_profile_phones.guid = autofill_profile_names.guid
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} - Autofill - Profiles')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder, f'{browser_name} - Autofill - Profiles.temphtml')
            report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
            report.start_artifact_report(report_folder, os.path.basename(report_path))
            report.add_script()
            data_headers = ('Date Modified','GUID','First Name','Middle Name','Last Name','Email','Phone Number','Company Name','Address','City','State','Zip Code','Date Last Used','Use Count')
            data_list = []
            for row in all_rows:
                data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = f'{browser_name} - Autofill - Profiles'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = f'{browser_name} - Autofill - Profiles'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} - Autofill - Profiles data available')
        
        db.close()
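The PRAGMA table_info(autofill) check above is how this parser adapts to Chrome schema changes (older builds keep the timestamps in a separate autofill_dates table). The same check factored into a reusable helper, as a sketch (table_has_column is hypothetical, not part of the framework):

def table_has_column(cursor, table, column):
    # PRAGMA table_info yields one row per column; index 1 is the column name.
    return column in (row[1] for row in cursor.execute(f'PRAGMA table_info({table})'))

# Usage mirroring the branch above:
#   if table_has_column(cursor, 'autofill', 'date_created'): ...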
Example #10
def get_calendarAll(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute("""
	select 
	title,
	flags,
	color,
	symbolic_color_name,
	external_id,
	self_identity_email
	from Calendar
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        description = ''
        report = ArtifactHtmlReport('Calendar List')
        report.start_artifact_report(report_folder, 'List', description)
        report.add_script()
        data_headers = ('Title', 'Flags', 'Color', 'Symbolic Color Name',
                        'External ID', 'Self Identity Email')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Calendar List'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available for Calendar List')

    cursor.execute("""
	Select
	DATETIME(start_date + 978307200, 'UNIXEPOCH') as startdate,
	start_tz,
	DATETIME(end_date + 978307200, 'UNIXEPOCH') as enddate,
	end_tz,
	all_day,
	summary,
	calendar_id,
	DATETIME(last_modified+ 978307200, 'UNIXEPOCH') as lastmod
	from CalendarItem
	order by startdate
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        description = ''
        report = ArtifactHtmlReport('Calendar Items')
        report.start_artifact_report(report_folder, 'Items', description)
        report.add_script()
        data_headers = ('Start Date', 'Start Timezone', 'End Date',
                        'End Timezone', 'All Day?', 'Summary', 'Calendar ID',
                        'Last Modified')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Calendar Items'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Calendar Items'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for Calendar Items')

    cursor.execute("""
	SELECT
	display_name,
	address,
	first_name,
	last_name
	from Identity
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        description = ''
        report = ArtifactHtmlReport('Calendar Identity')
        report.start_artifact_report(report_folder, 'Identity', description)
        report.add_script()
        data_headers = ('Display Name', 'Address', 'First Name', 'Last Name')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Calendar Identity'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available for Calendar Identity')
Example #11
def get_locationDcellloc(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_celllocation.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP", 
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	LAC AS "LAC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PSC AS "PSC",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14], row[15]))

        description = ''
        report = ArtifactHtmlReport('LocationD Cell Location')
        report.start_artifact_report(report_folder, 'Cell Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'LAC', 'CI',
                        'UARFCN', 'PSC', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD Cell Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD Cell Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD Cell Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD Cell Location')

    db.close()
    return
Example #12
def get_googleNowPlaying(files_found, report_folder, seeker, wrap_text):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif not file_found.endswith('history_db'):
            continue # Skip all other files (-wal)

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        Select
        CASE
            timestamp 
            WHEN
                "0" 
            THEN
                "" 
            ELSE
                datetime(timestamp / 1000, "unixepoch")
        END AS "timestamp",
        history_entry
        FROM
        recognition_history
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            description = 'This is data stored by the Now Playing feature in Pixel phones, which '\
                        'shows song data on the lock screen for any music playing nearby. It\'s ' \
                        'part of <a href="https://play.google.com/store/apps/details?id=com.google.intelligence.sense"'\
                        ' target="_blank">Pixel Ambient Services</a> or part of <a href="https://play.google.com/store/apps/details?id=com.google.android.as"'\
                        ' target="_blank">Pixel Device Personalization Services</a> depending on OS version.'
            report = ArtifactHtmlReport('Now Playing History')
            report.start_artifact_report(report_folder, 'Now Playing', description)
            report.add_script()
            
            data_headers = ('Timestamp', 'Timezone', 'Song Title', 'Artist', 'Duration',
                            'Album', 'Album Year')
            data_list = []

            pb_types = {
                '9': {
                    'type': 'message',
                    'message_typedef': {
                        # Converts field 6 to a double from its generic fixed64 encoding
                        '6': {'type': 'double', 'name': ''}
                    }
                }
            }
            last_data_set = [] # Since there are a lot of similar entries differing only in timestamp, we can combine them.
            
            for row in all_rows:
                timestamp = row[0]
                pb = row[1]

                data, actual_types = blackboxprotobuf.decode_message(pb, pb_types)
                data = recursive_convert_bytes_to_str(data)
                
                try:             timezones = FilterInvalidValue(data["7"])
                except KeyError: timezones = ''

                try:             songtitle = FilterInvalidValue(data["9"]["3"])
                except KeyError: songtitle = ''

                try:             artist = FilterInvalidValue(data["9"]["4"]) 
                except KeyError: artist = ''

                try:             durationinsecs = data["9"]["6"]
                except KeyError: durationinsecs = ''

                try:             album = FilterInvalidValue(data["9"]["13"])
                except KeyError: album = ''

                try:             year = FilterInvalidValue(data["9"]["14"])
                except KeyError: year = ''
                
                if durationinsecs:
                    duration = time.strftime('%H:%M:%S', time.gmtime(durationinsecs))
                else:
                    duration = ''
                if not last_data_set:
                    last_data_set = [timestamp, escape(timezones), escape(songtitle), escape(artist), duration, escape(album), year]
                elif AreContentsSame(last_data_set, timezones, songtitle, artist, duration, album, year):
                    if last_data_set[0] == timestamp: # exact duplicate, do not add
                        pass
                    else:
                        last_data_set[0] += ',<br />' + timestamp
                else:
                    data_list.append(last_data_set)
                    # Seed the next set from the current row so its data is not lost
                    last_data_set = [timestamp, escape(timezones), escape(songtitle), escape(artist), duration, escape(album), year]
            if last_data_set:
                data_list.append(last_data_set)
            logfunc("{} entries grouped into {}".format(usageentries, len(data_list)))
            report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
            report.end_artifact_report()
            
            tsvname = 'google now playing'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'Google Now Playing'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Now playing history')

        db.close()
        return
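The helpers recursive_convert_bytes_to_str, FilterInvalidValue, and AreContentsSame are called above but not included in this excerpt. The sketch below is only a guess at their behavior, inferred from the call sites; the real ALEAPP implementations may differ.

# Hypothetical helper sketches, inferred from the call sites above only.
from html import escape

def recursive_convert_bytes_to_str(obj):
    # Walk the decoded protobuf structure, decoding bytes values to str.
    if isinstance(obj, bytes):
        return obj.decode('utf-8', errors='replace')
    if isinstance(obj, dict):
        return {k: recursive_convert_bytes_to_str(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [recursive_convert_bytes_to_str(v) for v in obj]
    return obj

def FilterInvalidValue(value):
    # Treat missing or placeholder protobuf values as empty strings.
    return value if isinstance(value, str) and value.strip() else ''

def AreContentsSame(last_data_set, timezones, songtitle, artist, duration, album, year):
    # Compare everything except the (possibly combined) timestamp at index 0,
    # escaping the raw values the same way last_data_set was built.
    return last_data_set[1:] == [escape(timezones), escape(songtitle),
                                 escape(artist), duration, escape(album), year]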
Example #13
def get_knowCappact(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
                DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
                DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
                ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
                ZSOURCE.ZGROUPID AS "GROUP ID",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__CONTENTDESCRIPTION AS "CONTENT DESCRIPTION",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYREQUIREDSTRING AS "USER ACTIVITY REQUIRED STRING",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__SUGGESTEDINVOCATIONPHRASE AS "SUGGESTED INVOCATION PHRASE",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
                ZSOURCE.ZSOURCEID AS "SOURCE ID",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMIDENTIFIER AS "ID",
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYUUID AS "ACTIVITY UUID",
                CASE ZOBJECT.ZSTARTDAYOFWEEK 
                    WHEN "1" THEN "Sunday"
                    WHEN "2" THEN "Monday"
                    WHEN "3" THEN "Tuesday"
                    WHEN "4" THEN "Wednesday"
                    WHEN "5" THEN "Thursday"
                    WHEN "6" THEN "Friday"
                    WHEN "7" THEN "Saturday"
                END "DAY OF WEEK",
                ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
                DATETIME(ZOBJECT.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ENTRY CREATION",
                DATETIME(ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__EXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION DATE",
                ZOBJECT.ZUUID AS "UUID", 
                ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
            FROM
               ZOBJECT 
               LEFT JOIN
                  ZSTRUCTUREDMETADATA 
                  ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
               LEFT JOIN
                  ZSOURCE 
                  ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
            WHERE
               ZSTREAMNAME IS "/app/activity"

        ''')
    else:
        cursor = db.cursor()
        cursor.execute('''
        SELECT
                DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
                DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
                ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__TITLE AS "TITLE", 
                ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
                CASE ZOBJECT.ZSTARTDAYOFWEEK 
                    WHEN "1" THEN "Sunday"
                    WHEN "2" THEN "Monday"
                    WHEN "3" THEN "Tuesday"
                    WHEN "4" THEN "Wednesday"
                    WHEN "5" THEN "Thursday"
                    WHEN "6" THEN "Friday"
                    WHEN "7" THEN "Saturday"
                END "DAY OF WEEK",
                ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
                DATETIME(ZOBJECT.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ENTRY CREATION",
                DATETIME(ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__EXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION DATE", 
                ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
            FROM
               ZOBJECT 
               LEFT JOIN
                  ZSTRUCTUREDMETADATA 
                  ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
               LEFT JOIN
                  ZSOURCE 
                  ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
            WHERE
               ZSTREAMNAME IS "/app/activity"
            ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []

        if version.parse(iOSversion) >= version.parse("12"):
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16], row[17], row[18]))

            report = ArtifactHtmlReport('KnowledgeC Application Activity')
            report.start_artifact_report(report_folder, 'Application Activity')
            report.add_script()
            data_headers = ('Start', 'End', 'Bundle ID', 'Group ID',
                            'Activity Type', 'Content Description',
                            'User Activity Required String', 'Content URL',
                            'Suggested Invocation Phrase', 'Unique ID',
                            'Source ID', 'ID', 'Activity UUID', 'Day of Week',
                            'GMT Offset', 'Entry Creation', 'Expiration Date',
                            'UUID', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Application Activity'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Application Activity'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            for row in all_rows:
                # The pre-iOS 12 query returns 11 columns: row[0]..row[10]
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10]))

            report = ArtifactHtmlReport('KnowledgeC Application Activity')
            report.start_artifact_report(report_folder, 'Application Activity')
            report.add_script()
            data_headers = ('Start', 'End', 'Bundle ID', 'Activity Type',
                            'Title', 'Content URL', 'Day of Week',
                            'GMT Offset', 'Entry Creation', 'Expiration Date',
                            'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Application Activity'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Application Activity'
            timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
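A note on the 978307200 constant used throughout these KnowledgeC queries: Core Data stores timestamps as seconds since Apple's reference date of 2001-01-01 UTC, and 978307200 is the gap between that date and the Unix epoch. A minimal sketch of the same conversion in Python:

# Sketch: the conversion DATETIME(x + 978307200, 'UNIXEPOCH') performs in
# SQLite. 978307200 is the gap in seconds between 1970-01-01 and Apple's
# Core Data reference date of 2001-01-01 (both UTC).
from datetime import datetime, timezone

assert datetime(2001, 1, 1, tzinfo=timezone.utc).timestamp() == 978307200

def core_data_to_utc(core_data_seconds):
    return datetime.fromtimestamp(core_data_seconds + 978307200,
                                  tz=timezone.utc)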
Example #14
def get_Oruxmaps(files_found, report_folder, seeker, wrap_text):
    file_found = str(files_found[0])
    source_file = file_found.replace(seeker.directory, '')
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT poilat, poilon, poialt, poitime/1000, poiname FROM pois
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Oruxmaps POI')
        report.start_artifact_report(report_folder, 'Oruxmaps POI')
        report.add_script()
        data_headers = ('Latitude', 'Longitude', 'Altitude', 'Timestamp',
                        'POI Name')
        data_list = []

        for row in all_rows:

            timestamp = datetime.datetime.fromtimestamp(int(
                row[3])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append((row[0], row[1], row[2], timestamp, row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Oruxmaps POI'
        tsv(report_folder, data_headers, data_list, tsvname, source_file)

        tlactivity = 'Oruxmaps POI'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Oruxmaps POI data available')

    cursor.execute('''
            SELECT tracks._id, trackname, trackciudad, segname, trkptlat, trkptlon, trkptalt, trkpttime/1000 
              FROM tracks, segments, trackpoints
             where tracks._id = segments.segtrack and segments._id = trackpoints.trkptseg
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Oruxmaps Tracks')
        report.start_artifact_report(report_folder, 'Oruxmaps Tracks')
        report.add_script()
        data_headers = ('Track ID', 'Track Name', 'Track Description',
                        'Segment Name', 'Latitude', 'Longitude', 'Altitude',
                        'Timestamp')
        data_list = []

        for row in all_rows:
            timestamp = datetime.datetime.fromtimestamp(int(
                row[7])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], timestamp))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Oruxmaps Tracks'
        tsv(report_folder, data_headers, data_list, tsvname, source_file)

        tlactivity = 'Oruxmaps Tracks'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Oruxmaps Tracks data available')

    db.close()
    return
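One caveat in the example above: datetime.datetime.fromtimestamp() renders poitime and trkpttime in the local timezone of the machine running the parser, so the same database can yield different report timestamps on different workstations. A UTC-pinned sketch of the conversion:

# Sketch: a UTC-pinned variant of the timestamp conversion used above, so
# the output does not depend on the examiner's local timezone.
import datetime

def epoch_to_utc_string(epoch_seconds):
    ts = datetime.datetime.fromtimestamp(int(epoch_seconds),
                                         tz=datetime.timezone.utc)
    return ts.strftime('%Y-%m-%d %H:%M:%S')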
def get_powerlogVolumePercentage(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/powerlog_audio_volume.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute('''
    SELECT
    DATETIME(VOLUME_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
    VOLUME AS "VOLUME PERCENTAGE",
    CASE MUTED 
        WHEN "0" THEN "NO" 
        WHEN "1" THEN "YES" 
    END AS "MUTED",
    DATETIME(VOLUME_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_VOLUME_TIMESTAMP,
    DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
    SYSTEM AS TIME_OFFSET,
    VOLUME_ID AS "PLAUDIOAGENT_EVENTFORWARD_OUTPUT TABLE ID" 
    FROM
    (
    SELECT
        VOLUME_ID,
        VOLUME_TIMESTAMP,
        TIME_OFFSET_TIMESTAMP,
        MAX(TIME_OFFSET_ID) AS MAX_ID,
        VOLUME,
        MUTED,
        SYSTEM
    FROM
        (
        SELECT
            PLAUDIOAGENT_EVENTFORWARD_OUTPUT.TIMESTAMP AS VOLUME_TIMESTAMP,
            VOLUME,
            MUTED,
            PLAUDIOAGENT_EVENTFORWARD_OUTPUT.ID AS "VOLUME_ID" ,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
        FROM
            PLAUDIOAGENT_EVENTFORWARD_OUTPUT
        LEFT JOIN
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET
        )
        AS VOLUME_STATE 
    GROUP BY
        VOLUME_ID 
    )
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

        report = ArtifactHtmlReport('Powerlog Volume Percentage')
        report.start_artifact_report(report_folder, 'Volume Percentage')
        report.add_script()
        data_headers = ('Adjusted Timestamp', 'Volume Percentage', 'Muted',
                        'Original Volume Timestamp', 'Offset Timestamp',
                        'Time Offset', 'Event Forward Output Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Powerlog Volume Percentage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Powerlog Volume Percentage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
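The nested sub-selects above implement a pattern that recurs in all of the Powerlog examples: raw event timestamps in CurrentPowerlog.PLSQL drift from wall-clock time, and PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET records a SYSTEM correction. Joining every event row against the offset table and grouping by the event ID with MAX(TIME_OFFSET_ID) keeps one offset row per event (SQLite fills the bare columns from the row that wins a lone MIN/MAX aggregate), and the outer query adds that SYSTEM value to the raw timestamp. Reduced to Python, the adjustment is simply:

# Sketch of the ADJUSTED_TIMESTAMP computation used in the Powerlog
# queries: the raw Unix timestamp plus the latest known SYSTEM offset.
from datetime import datetime, timezone

def adjusted_timestamp(raw_unix_seconds, system_offset_seconds):
    return datetime.fromtimestamp(raw_unix_seconds + system_offset_seconds,
                                  tz=timezone.utc)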
def get_knowCappactsafari(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_safari_browsing.txt
        # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
            DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
            DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
            ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__CONTENTDESCRIPTION AS "CONTENT DESCRIPTION",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYREQUIREDSTRING AS "USER ACTIVITY REQUIRED STRING",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMIDENTIFIER AS "ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__USERACTIVITYUUID AS "ACTIVITY UUID",
            ZSOURCE.ZSOURCEID AS "SOURCE ID",
            CASE ZOBJECT.ZSTARTDAYOFWEEK 
              WHEN "1" THEN "Sunday"
              WHEN "2" THEN "Monday"
              WHEN "3" THEN "Tuesday"
              WHEN "4" THEN "Wednesday"
              WHEN "5" THEN "Thursday"
              WHEN "6" THEN "Friday"
              WHEN "7" THEN "Saturday"
            END "DAY OF WEEK",
            ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
            DATETIME(ZOBJECT.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ENTRY CREATION",
            DATETIME(ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__EXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION DATE",
            ZOBJECT.ZUUID AS "UUID", 
            ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
          FROM
             ZOBJECT 
             LEFT JOIN
                ZSTRUCTUREDMETADATA 
                ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
             LEFT JOIN
                ZSOURCE 
                ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
          WHERE
             ZSTREAMNAME IS "/app/activity"
             AND ("BUNDLE ID" = "com.apple.mobilesafari" OR "BUNDLE ID" = "com.apple.Safari")
        ''')
    else:
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
            DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
            ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ACTIVITYTYPE AS "ACTIVITY TYPE", 
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDCONTENTURL AS "CONTENT URL",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMIDENTIFIER AS "ID",
            ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__ITEMRELATEDUNIQUEIDENTIFIER AS "UNIQUE ID",
            ZSOURCE.ZSOURCEID AS "SOURCE ID",
            CASE ZOBJECT.ZSTARTDAYOFWEEK 
              WHEN "1" THEN "Sunday"
              WHEN "2" THEN "Monday"
              WHEN "3" THEN "Tuesday"
              WHEN "4" THEN "Wednesday"
              WHEN "5" THEN "Thursday"
              WHEN "6" THEN "Friday"
              WHEN "7" THEN "Saturday"
            END "DAY OF WEEK",
            ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
            DATETIME(ZOBJECT.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ENTRY CREATION",
            DATETIME(ZSTRUCTUREDMETADATA.Z_DKAPPLICATIONACTIVITYMETADATAKEY__EXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION DATE",
            ZOBJECT.ZUUID AS "UUID", 
            ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
          FROM
             ZOBJECT 
             LEFT JOIN
                ZSTRUCTUREDMETADATA 
                ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
             LEFT JOIN
                ZSOURCE 
                ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
          WHERE
             ZSTREAMNAME IS "/app/activity"
             AND ("BUNDLE ID" = "com.apple.mobilesafari" OR "BUNDLE ID" = "com.apple.Safari")
            ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []

        if version.parse(iOSversion) >= version.parse("12"):

            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16]))

            report = ArtifactHtmlReport(
                'KnowledgeC Application Activity Safari')
            report.start_artifact_report(report_folder,
                                         'Application Activity Safari')
            report.add_script()
            data_headers = ('Start', 'End', 'Bundle ID', 'Activity Type',
                            'Content Description', 'Content URL',
                            'User Activity Required String', 'ID', 'Unique ID',
                            'Activity UUID', 'Source ID', 'Day of Week',
                            'GMT Offset', 'Entry Creation', 'Expiration Date',
                            'UUID', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Application Activity Safari'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Application Activity Safari'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            report = ArtifactHtmlReport(
                'KnowledgeC Application Activity Safari')
            report.start_artifact_report(report_folder,
                                         'Application Activity Safari')
            report.add_script()
            data_headers = ('Start', 'End', 'Bundle ID', 'Activity Type',
                            'Content URL', 'ID', 'Unique ID', 'Source ID',
                            'Day of Week', 'GMT Offset', 'Entry Creation',
                            'Expiration Date', 'UUID', 'ZOBJECT Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'KnowledgeC Application Activity Safari'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'KnowledgeC Application Activity Safari'
            timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
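A portability note on the WHERE clause above: "BUNDLE ID" = "com.apple.mobilesafari" leans on SQLite's lenient quoting, where a double-quoted token that does not resolve to a column is silently accepted as a string literal. Standard SQL reserves double quotes for identifiers, so a sketch of the same filter with conventional quoting against the underlying column would be:

# Sketch: the Safari filter with standard SQL quoting (single quotes for
# string literals), written against the underlying ZVALUESTRING column.
safari_filter = '''
    WHERE ZSTREAMNAME = '/app/activity'
      AND ZOBJECT.ZVALUESTRING IN ('com.apple.mobilesafari', 'com.apple.Safari')
'''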
Example #17
def get_healthElevation(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        DATETIME(SAMPLES.START_DATE + 978307200, 'UNIXEPOCH') AS "START DATE",
        DATETIME(SAMPLES.END_DATE + 978307200, 'UNIXEPOCH') AS "END DATE",
        METADATA_VALUES.NUMERICAL_VALUE/100.00 AS "ELEVATION (METERS)",
        (METADATA_VALUES.NUMERICAL_VALUE/100.00)*3.28084 AS "ELEVATION (FEET)",
        CASE WORKOUTS.ACTIVITY_TYPE 
            WHEN 63 THEN "HIGH INTENSITY INTERVAL TRAINING (HIIT)" 
            WHEN 37 THEN "INDOOR / OUTDOOR RUN" 
            WHEN 3000 THEN "OTHER" 
            WHEN 52 THEN "INDOOR / OUTDOOR WALK" 
            WHEN 20 THEN "FUNCTIONAL TRAINING" 
            WHEN 13 THEN "INDOOR CYCLE" 
            WHEN 16 THEN "ELLIPTICAL" 
            WHEN 35 THEN "ROWER" 
            ELSE "UNKNOWN" || "-" || WORKOUTS.ACTIVITY_TYPE 
        END "WORKOUT TYPE", 
        WORKOUTS.DURATION / 60.00 AS "DURATION (IN MINUTES)", 
        WORKOUTS.TOTAL_ENERGY_BURNED AS "CALORIES BURNED", 
        WORKOUTS.TOTAL_DISTANCE AS "DISTANCE IN KILOMETERS",
        WORKOUTS.TOTAL_DISTANCE*0.621371 AS "DISTANCE IN MILES",  
        WORKOUTS.TOTAL_BASAL_ENERGY_BURNED AS "TOTAL BASAL ENERGY BURNED", 
            CASE WORKOUTS.GOAL_TYPE 
                WHEN 2 THEN "MINUTES" 
                WHEN 0 THEN "OPEN" 
            END "GOAL TYPE",
        WORKOUTS.GOAL AS "GOAL", 
        WORKOUTS.TOTAL_FLIGHTS_CLIMBED AS "FLIGHTS CLIMBED", 
        WORKOUTS.TOTAL_W_STEPS AS "STEPS" 
        FROM
        SAMPLES 
        LEFT OUTER JOIN
            METADATA_VALUES 
            ON METADATA_VALUES.OBJECT_ID = SAMPLES.DATA_ID 
        LEFT OUTER JOIN
            METADATA_KEYS 
            ON METADATA_KEYS.ROWID = METADATA_VALUES.KEY_ID 
        LEFT OUTER JOIN
            WORKOUTS 
            ON WORKOUTS.DATA_ID = SAMPLES.DATA_ID 
        WHERE
        WORKOUTS.ACTIVITY_TYPE NOT NULL AND (KEY IS "_HKPrivateWorkoutElevationAscendedQuantity" OR KEY IS "HKElevationAscended")
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        report = ArtifactHtmlReport('Health Workout Indoor Elevation')
        report.start_artifact_report(report_folder, 'Workout Indoor Elevation')
        report.add_script()
        data_headers = ('Start Date', 'End Date', 'Elevation in Meters',
                        'Elevation in Feet', 'Workout Type',
                        'Duration in Min.', 'Calories Burned',
                        'Distance in KM', 'Distance in Miles',
                        'Total Basal Energy Burned', 'Goal Type', 'Goal',
                        'Flights Climbed', 'Steps')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Health Elevation'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Health Elevation'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
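The ACTIVITY_TYPE CASE in the query above decodes a handful of HealthKit workout codes and labels everything else UNKNOWN-<code>. The same decode is often easier to extend as a Python mapping; a sketch using only the codes that appear in the query:

# Sketch: the ACTIVITY_TYPE decode as a Python mapping, limited to the
# codes that appear in the query above.
WORKOUT_TYPES = {
    63: 'HIGH INTENSITY INTERVAL TRAINING (HIIT)',
    37: 'INDOOR / OUTDOOR RUN',
    3000: 'OTHER',
    52: 'INDOOR / OUTDOOR WALK',
    20: 'FUNCTIONAL TRAINING',
    13: 'INDOOR CYCLE',
    16: 'ELLIPTICAL',
    35: 'ROWER',
}

def decode_workout_type(code):
    return WORKOUT_TYPES.get(code, 'UNKNOWN-{}'.format(code))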
Example #18
def get_powerlogLocuseapp(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/powerlog_location_client_status.txt
        # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
            DATETIME(LOCATIONAGENT_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
            DATETIME(TIMESTAMPLOGGED+ SYSTEM, 'UNIXEPOCH') AS "TIMESTAMP LOGGED (ADJ)",
            DATETIME(TIMESTAMPEND + SYSTEM, 'UNIXEPOCH') AS "TIMESTAMP END (ADJ)",
            BUNDLEID AS "BUNDLE ID",
            TYPE AS "TYPE",
            LOCATIONDESIREDACCURACY AS "LOCATION DESIRED ACCURACY",
            LOCATIONDISTANCEFILTER AS "LOCATION DISTANCE FILTER",
            CLIENT AS "CLIENT",
            EXECUTABLE AS "EXECUTABLE",
            DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
            SYSTEM AS TIME_OFFSET,
            LOCATIONAGENT_ID AS "PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS TABLE ID" 
        	FROM
            (
            SELECT
                LOCATIONAGENT_ID,
                LOCATIONAGENT_TIMESTAMP,
                TIME_OFFSET_TIMESTAMP,
                MAX(TIME_OFFSET_ID) AS MAX_ID,
                TIMESTAMPEND,
                TIMESTAMPLOGGED,
                BUNDLEID,
                TYPE,
                LOCATIONDESIREDACCURACY,
                LOCATIONDISTANCEFILTER,
                CLIENT,
                EXECUTABLE,
                SYSTEM
            FROM
                (
                SELECT
                    PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS.TIMESTAMP AS LOCATIONAGENT_TIMESTAMP,
                    TIMESTAMPEND,
                    TIMESTAMPLOGGED,
                    BUNDLEID,
                    TYPE,
                    LOCATIONDESIREDACCURACY,
                    LOCATIONDISTANCEFILTER,
                    CLIENT,
                    EXECUTABLE,
                    PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS.ID AS "LOCATIONAGENT_ID" ,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
                FROM
                    PLLOCATIONAGENT_EVENTFORWARD_CLIENTSTATUS
                LEFT JOIN
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET
                )
                AS LOCATIONAGENT_STATE 
            GROUP BY
                LOCATIONAGENT_ID 
            )        
        ''')
        
        
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:    
                    data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11]))

                report = ArtifactHtmlReport('Powerlog Location Use by App')
                report.start_artifact_report(report_folder, 'Location Use by App')
                report.add_script()
                data_headers = ('Adjusted Timestamp','Timestamp Logged','Timestamp End','Bundle ID','Type','Location Desired Accuracy','Location Distance Filter','Client','Executable','Offset Timestamp','Time Offset', 'Client Status Table ID' )   
                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()
                
                tsvname = 'Powerlog Location Use by App'
                tsv(report_folder, data_headers, data_list, tsvname)
                
                tlactivity = 'Powerlog Location Use by App'
                timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No data available in Location Use by App')

    db.close()
    return
Example #19
def get_powerlogWifiprop(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/powerlog_wifi_properties.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute('''
    SELECT
    DATETIME(WIFIPROPERTIES_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
    CURRENTSSID,
    CURRENTCHANNEL,
    DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
    SYSTEM AS TIME_OFFSET,
    WIFIPROPERTIES_ID AS "PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES TABLE ID" 
	FROM
    (
    SELECT
        WIFIPROPERTIES_ID,
        WIFIPROPERTIES_TIMESTAMP,
        TIME_OFFSET_TIMESTAMP,
        MAX(TIME_OFFSET_ID) AS MAX_ID,
        CURRENTSSID,
        CURRENTCHANNEL,
        SYSTEM
    FROM
        (
        SELECT
            PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES.TIMESTAMP AS WIFIPROPERTIES_TIMESTAMP,
            CURRENTSSID,
            CURRENTCHANNEL,
            PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES.ID AS "WIFIPROPERTIES_ID" ,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
        FROM
            PLWIFIAGENT_EVENTBACKWARD_CUMULATIVEPROPERTIES
        LEFT JOIN
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
        )
        AS WIFIPROPERTIES_STATE 
    GROUP BY
        WIFIPROPERTIES_ID 
    )    
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report = ArtifactHtmlReport('Powerlog WiFi Network Connections')
        report.start_artifact_report(report_folder, 'WiFi Network Connections')
        report.add_script()
        data_headers = ('Adjusted Timestamp', 'Current SSID',
                        'Current Channel', 'Offset Timestamp', 'Time Offset',
                        'Cumulative Prop. Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Powerlog Wifi Prop'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Powerlog Wifi Prop'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
def get_powerlogProcessdatausage(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
            DATETIME(TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
            DATETIME(TIMESTAMPEND + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_END_TIMESTAMP,
            BUNDLENAME AS 'BUNDLE ID',
            PROCESSNAME AS 'PROCESS NAME',
            CELLIN AS 'CELL IN',
            CELLOUT AS 'CELL OUT',
            WIFIIN AS 'WIFI IN',
            WIFIOUT AS 'WIFI OUT',
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_TIMESTAMP,
            DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
            SYSTEM AS TIME_OFFSET,
            TABLE_ID AS "PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF TABLE ID"
        FROM
        (
        SELECT
            TABLE_ID,
            TIMESTAMP,
            TIME_OFFSET_TIMESTAMP,
            MAX(TIME_OFFSET_ID) AS MAX_ID,
            TIMESTAMPEND,
            BUNDLENAME,
            PROCESSNAME,
            CELLIN,
            CELLOUT,
            WIFIIN,
            WIFIOUT,
            SYSTEM
        FROM
        (
        SELECT
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.TIMESTAMP,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.TIMESTAMPEND,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.BUNDLENAME,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.PROCESSNAME,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.CELLIN,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.CELLOUT,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.WIFIIN,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.WIFIOUT,
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF.ID AS "TABLE_ID",
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM
        FROM
            PLPROCESSNETWORKAGENT_EVENTINTERVAL_USAGEDIFF 
        LEFT JOIN
            PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
        )
        GROUP BY
        TABLE_ID 
             )
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11]))

        report = ArtifactHtmlReport('Powerlog Process Data Usage')
        report.start_artifact_report(report_folder, 'Process Data Usage')
        report.add_script()
        data_headers = ('Adjusted Timestamp', 'Adjusted End Timestamp',
                        'Bundle ID', 'Process Name', 'Cell In', 'Cell Out',
                        'WiFi In', 'WiFi Out', 'Original Timestamp',
                        'Offset Timestamp', 'Time Offset',
                        'Usage Diff Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Powerlog Process Data Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Powerlog Process Data Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Example #21
def get_locationDallB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		bundleid,
		altitude,
		horizontalaccuracy,
		verticalaccuracy,
		state,
		age,
		routinemode,
		locationofinteresttype,
		latitude,
		longitude,
		speed,
		course,
		confidence
		from appharvest
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = ''
            report = ArtifactHtmlReport('LocationD App Harvest')
            report.start_artifact_report(report_folder, 'App Harvest',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Bundle ID', 'Altitude',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'State', 'Age', 'Routine Mode',
                            'Location of Interest Type', 'Latitude',
                            'Longitude', 'Speed', 'Course', 'Confidence')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD App Harvest'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD App Harvest'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD App Harvest'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD App Harvest')

    if does_table_exist(db, "cdmacelllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		sid,
		nid,
		bsid,
		zoneid,
		bandclass,
		channel,
		pnoffset,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from cdmacelllocation
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16]))

            description = ''
            report = ArtifactHtmlReport('LocationD CDMA Location')
            report.start_artifact_report(report_folder, 'CDMA Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'SID', 'NID', 'BSID', 'ZONEID',
                            'BANDCLASS', 'Channel', 'PNOFFSET', 'Altitude',
                            'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD CDMA Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD CDMA Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD CDMA Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD CDMA Location')

    if does_table_exist(db, "celllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		mnc,
		lac,
		ci,
		uarfcn,
		psc,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from celllocation
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13], row[14]))

            description = ''
            report = ArtifactHtmlReport('LocationD Cell Location')
            report.start_artifact_report(report_folder, 'Cell Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'MNC', 'LAC', 'CI', 'UARFCN',
                            'PSC', 'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD Cell Location')

    if does_table_exist(db, "ltecelllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select 
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		mnc,
		ci,
		uarfcn,
		pid,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from ltecelllocation	
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = ''
            report = ArtifactHtmlReport('LocationD LTE Location')
            report.start_artifact_report(report_folder, 'LTE Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'MNC', 'CI', 'UARFCN', 'PID',
                            'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD LTE Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD LTE Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD LTE Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD LTE Location')

    cursor = db.cursor()
    cursor.execute("""
	select
	datetime(timestamp + 978307200,'unixepoch'),
	mac,
	channel,
	infomask,
	speed,
	course,
	confidence,
	score,
	reach,
	horizontalaccuracy,
	verticalaccuracy,
	latitude,
	longitude
	from wifilocation
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'MAC', 'Channel', 'Infomask', 'Speed',
                        'Course', 'Confidence', 'Score', 'Reach',
                        'Horizontal Accuracy', 'Vertical Accuracy', 'Latitude',
                        'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')

    db.close()
    return
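The does_table_exist helper called above is not included in this excerpt. A plausible sketch, shown only as an assumption about its behavior, checks sqlite_master:

# Hypothetical sketch of the does_table_exist helper used above; the real
# iLEAPP implementation may differ.
def does_table_exist(db, table_name):
    cursor = db.cursor()
    cursor.execute(
        "SELECT count(*) FROM sqlite_master WHERE type = 'table' AND name = ?",
        (table_name,))
    return cursor.fetchone()[0] > 0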
Example #22
def get_routineDlocations(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("10"):
        logfunc(
            "Unsupported version for RoutineD Locations Cache.sqlite on iOS " +
            iOSversion)
    else:
        for file_found in files_found:
            file_found = str(file_found)

            if file_found.endswith('Cache.sqlite'):
                break

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(ztimestamp + 978307200, 'unixepoch'),
        zaltitude,
        zcourse,
        zspeed,
        zspeed*2.23694,
        zspeed*3.6,
        zhorizontalaccuracy,
        zverticalaccuracy,
        zlatitude,
        zlongitude 
        from
        zrtcllocationmo
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9]))

            description = 'Granular location data (~ 1 week)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD ZRTCLLOCATIONMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Altitude', 'Course', 'Speed (M/S)',
                            'Speed (MPH)', 'Speed (KMPH)',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTCLLOCATIONMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTCLLOCATIONMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTCLLOCATIONMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTCLLOCATIONMO data available')

        cursor.execute('''
        select
        datetime(zdate + 978307200, 'unixepoch'),
        zsource,
        zlatitude,
        zlongitude
        from
        zrthintmo 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            description = 'Semi-granular location data (~ 1 week)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD ZRTHINTMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Source', 'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTHINTMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTHINTMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTHINTMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTHINTMO data available')

        cursor.execute('''
        select
        datetime(zentrydate + 978307200, 'unixepoch'),
        datetime(zexitdate + 978307200, 'unixepoch'),
        datetime(zdetectiondate + 978307200, 'unixepoch'),
        (zexitdate-zentrydate)/60.00,
        ztype,
        zlocationlatitude,
        zlocationlongitude,
        zlocationuncertainty
        from
        zrtvisitmo
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7]))

            description = 'Visit locations'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD ZRTVISITMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Exit Timestamp',
                            'Detection Timestamp', 'Visit Time (Minutes)',
                            'Type', 'Latitude', 'Longitude', 'Uncertainty')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTVISITMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTVISITMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTVISITMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTVISITMO data available')

        db.close()
Example #23
def get_knowCwatchnear(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) == version.parse("11"):
        pass
    else:
        logfunc("Unsupported version for KnowledgC Watch Near on iOS " +
                iOSversion)
        return

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_device_watch_nearby.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			CASE ZOBJECT.ZVALUEINTEGER
				WHEN '0' THEN 'NO' 
				WHEN '1' THEN 'YES' 
			END "WATCH NEARBY",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		  (ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK", 
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
			ZOBJECT.ZUUID AS "UUID", 
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID"
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME is "/watch/nearby" 	
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Watch Near')
        report.start_artifact_report(report_folder, 'Watch Near', description)
        report.add_script()
        data_headers = ('Start', 'End', 'Watch Nearby', 'Usage in Seconds',
                        'Usage in Minutes', 'Day of Week', 'GMT Offset',
                        'Entry Creation', 'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Watch Near'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Watch Near'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Example #24
def get_chrome(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found) == 'History':  # skip -journal and other files
            continue
        browser_name = get_browser_name(file_found)
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()

        #Web History
        cursor.execute('''
        SELECT
        datetime(last_visit_time/1000000 + (strftime('%s','1601-01-01')),'unixepoch') AS LastVisitDate,
        url AS URL,
        title AS Title,
        visit_count AS VisitCount,
        typed_count AS TypedCount,
        id AS ID,
        CASE hidden
            WHEN 0 THEN ''
            WHEN 1 THEN 'Yes'
        END as Hidden
        FROM urls  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} - Web History')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(
                report_folder, f'{browser_name} - Web History.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Last Visit Time', 'URL', 'Title', 'Visit Count',
                            'Typed Count', 'ID', 'Hidden')
            data_list = []
            for row in all_rows:
                if wrap_text:
                    data_list.append((row[0], textwrap.fill(row[1], width=100),
                                      row[2], row[3], row[4], row[5], row[6]))
                else:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6]))
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} - Web History'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} - Web History'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} - Web History data available')

        #Web Visits
        cursor.execute('''
        SELECT
        datetime(visits.visit_time/1000000 + (strftime('%s','1601-01-01')),'unixepoch'),
        urls.url,
        urls.title,
        CASE visits.visit_duration
            WHEN 0 THEN ''
            ELSE strftime('%H:%M:%f', visits.visit_duration / 1000000.000,'unixepoch')
        END as Duration,
        CASE visits.transition & 0xff
            WHEN 0 THEN 'LINK'
            WHEN 1 THEN 'TYPED'
            WHEN 2 THEN 'AUTO_BOOKMARK'
            WHEN 3 THEN 'AUTO_SUBFRAME'
            WHEN 4 THEN 'MANUAL_SUBFRAME'
            WHEN 5 THEN 'GENERATED'
            WHEN 6 THEN 'START_PAGE'
            WHEN 7 THEN 'FORM_SUBMIT'
            WHEN 8 THEN 'RELOAD'
            WHEN 9 THEN 'KEYWORD'
            WHEN 10 THEN 'KEYWORD_GENERATED'
            ELSE NULL
        END AS CoreTransitionType,
        trim((CASE WHEN visits.transition & 0x00800000 THEN 'BLOCKED, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x01000000 THEN 'FORWARD_BACK, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x02000000 THEN 'FROM_ADDRESS_BAR, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x04000000 THEN 'HOME_PAGE, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x08000000 THEN 'FROM_API, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x10000000 THEN 'CHAIN_START, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x20000000 THEN 'CHAIN_END, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x40000000 THEN 'CLIENT_REDIRECT, ' ELSE '' END ||
        CASE WHEN visits.transition & 0x80000000 THEN 'SERVER_REDIRECT, ' ELSE '' END ||
        CASE WHEN visits.transition & 0xC0000000 THEN 'IS_REDIRECT_MASK, ' ELSE '' END),', ')
        AS Qualifiers,
        Query2.url AS FromURL
        FROM visits
        LEFT JOIN urls ON visits.url = urls.id
        LEFT JOIN (SELECT urls.url,urls.title,visits.visit_time,visits.id FROM visits LEFT JOIN urls ON visits.url = urls.id) Query2 ON visits.from_visit = Query2.id  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} - Web Visits')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(
                report_folder, f'{browser_name} - Web Visits.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Visit Timestamp', 'URL', 'Title', 'Duration',
                            'Transition Type', 'Qualifier(s)',
                            'From Visit URL')
            data_list = []
            for row in all_rows:
                if wrap_text:
                    data_list.append((row[0], textwrap.fill(row[1], width=100),
                                      row[2], row[3], row[4], row[5], row[6]))
                else:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6]))
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} - Web Visits'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} - Web Visits'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} - Web Visits data available')

        #Web Search
        cursor.execute('''
        SELECT
            url,
            title,
            visit_count,
            datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        FROM urls
        WHERE url like '%search?q=%'
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} - Search Terms')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(
                report_folder, f'{browser_name} - Search Terms.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Last Visit Time', 'Search Term', 'URL', 'Title',
                            'Visit Count')
            data_list = []
            for row in all_rows:
                search = row[0].split('search?q=')[1].split('&')[0]
                search = urllib.parse.unquote(search).replace('+', ' ')
                if wrap_text:
                    data_list.append(
                        (row[3], search, (textwrap.fill(row[0], width=100)),
                         row[1], row[2]))
                else:
                    data_list.append((row[3], search, row[0], row[1], row[2]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} - Search Terms'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} - Search Terms'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} - Search Terms data available')

        #Downloads
        # check for last_access_time column, an older version of chrome db (32) does not have it
        if does_column_exist_in_db(db, 'downloads', 'last_access_time'):
            last_access_time_query = '''
            CASE last_access_time 
                WHEN "0" 
                THEN "" 
                ELSE datetime(last_access_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
            END AS "Last Access Time"'''
        else:
            last_access_time_query = "'' as last_access_query"
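        # For reference, a schema check like the one above can be done with
        # PRAGMA table_info (a minimal sketch; does_column_exist_in_db is
        # assumed to wrap something equivalent):
        #   cols = [r[1] for r in db.execute('PRAGMA table_info(downloads)')]
        #   has_last_access_time = 'last_access_time' in cols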

        cursor.execute(f'''
        SELECT 
        CASE start_time  
            WHEN "0" 
            THEN "" 
            ELSE datetime(start_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        END AS "Start Time", 
        CASE end_time 
            WHEN "0" 
            THEN "" 
            ELSE datetime(end_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        END AS "End Time", 
        {last_access_time_query},
        tab_url, 
        target_path, 
        CASE state
            WHEN "0" THEN "In Progress"
            WHEN "1" THEN "Complete"
            WHEN "2" THEN "Canceled"
            WHEN "3" THEN "Interrupted"
            WHEN "4" THEN "Interrupted"
        END,
        CASE danger_type
            WHEN "0" THEN ""
            WHEN "1" THEN "Dangerous"
            WHEN "2" THEN "Dangerous URL"
            WHEN "3" THEN "Dangerous Content"
            WHEN "4" THEN "Content May Be Malicious"
            WHEN "5" THEN "Uncommon Content"
            WHEN "6" THEN "Dangerous But User Validated"
            WHEN "7" THEN "Dangerous Host"
            WHEN "8" THEN "Potentially Unwanted"
            WHEN "9" THEN "Allowlisted by Policy"
            WHEN "10" THEN "Pending Scan"
            WHEN "11" THEN "Blocked - Password Protected"
            WHEN "12" THEN "Blocked - Too Large"
            WHEN "13" THEN "Warning - Sensitive Content"
            WHEN "14" THEN "Blocked - Sensitive Content"
            WHEN "15" THEN "Safe - Deep Scanned"
            WHEN "16" THEN "Dangerous, But User Opened"
            WHEN "17" THEN "Prompt For Scanning"
            WHEN "18" THEN "Blocked - Unsupported Type"
        END,
        CASE interrupt_reason
            WHEN "0" THEN ""
            WHEN "1" THEN "File Error"
            WHEN "2" THEN "Access Denied"
            WHEN "3" THEN "Disk Full"
            WHEN "5" THEN "Path Too Long"
            WHEN "6" THEN "File Too Large"
            WHEN "7" THEN "Virus"
            WHEN "10" THEN "Temporary Problem"
            WHEN "11" THEN "Blocked"
            WHEN "12" THEN "Security Check Failed"
            WHEN "13" THEN "Resume Error"
            WHEN "20" THEN "Network Error"
            WHEN "21" THEN "Operation Timed Out"
            WHEN "22" THEN "Connection Lost"
            WHEN "23" THEN "Server Down"
            WHEN "30" THEN "Server Error"
            WHEN "31" THEN "Range Request Error"
            WHEN "32" THEN "Server Precondition Error"
            WHEN "33" THEN "Unable To Get File"
            WHEN "34" THEN "Server Unauthorized"
            WHEN "35" THEN "Server Certificate Problem"
            WHEN "36" THEN "Server Access Forbidden"
            WHEN "37" THEN "Server Unreachable"
            WHEN "38" THEN "Content Lenght Mismatch"
            WHEN "39" THEN "Cross Origin Redirect"
            WHEN "40" THEN "Canceled"
            WHEN "41" THEN "Browser Shutdown"
            WHEN "50" THEN "Browser Crashed"
        END,
        opened, 
        received_bytes, 
        total_bytes
        FROM downloads
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} - Downloads')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder,
                                       f'{browser_name} - Downloads.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Start Time', 'End Time', 'Last Access Time',
                            'URL', 'Target Path', 'State', 'Danger Type',
                            'Interrupt Reason', 'Opened?', 'Received Bytes',
                            'Total Bytes')
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} - Downloads'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} - Downloads'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} - Downloads data available')

        #Search Terms
        cursor.execute('''
        SELECT
            url_id,
            term,
            id,
            url,
            datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
        FROM keyword_search_terms, urls
        WHERE url_id = id
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(
                f'{browser_name} - Keyword Search Terms')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(
                report_folder,
                f'{browser_name} - Keyword Search Terms.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Last Visit Time', 'Term', 'URL')
            data_list = []
            for row in all_rows:
                if wrap_text:
                    data_list.append(
                        (row[4], row[1], (textwrap.fill(row[3], width=100))))
                else:
                    data_list.append((row[4], row[1], row[3]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} - Keyword Search Terms'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} - Keyword Search Terms'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} - Keyword Search Terms data available')

        db.close()
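Two Chrome conventions drive the queries above: timestamps are microseconds since 1601-01-01 UTC (the WebKit epoch), and visits.transition packs a core type in the low byte plus qualifier flags in the high bits. A minimal standalone sketch of both decodings, mirroring the SQL (the flag names are the ones the query itself emits; the sample values are made up):

from datetime import datetime, timedelta, timezone

WEBKIT_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)

CORE_TYPES = ['LINK', 'TYPED', 'AUTO_BOOKMARK', 'AUTO_SUBFRAME',
              'MANUAL_SUBFRAME', 'GENERATED', 'START_PAGE', 'FORM_SUBMIT',
              'RELOAD', 'KEYWORD', 'KEYWORD_GENERATED']

QUALIFIERS = [(0x00800000, 'BLOCKED'), (0x01000000, 'FORWARD_BACK'),
              (0x02000000, 'FROM_ADDRESS_BAR'), (0x04000000, 'HOME_PAGE'),
              (0x08000000, 'FROM_API'), (0x10000000, 'CHAIN_START'),
              (0x20000000, 'CHAIN_END'), (0x40000000, 'CLIENT_REDIRECT'),
              (0x80000000, 'SERVER_REDIRECT')]

def webkit_to_utc(microseconds):
    # Chrome stores last_visit_time/visit_time as microseconds since 1601.
    return WEBKIT_EPOCH + timedelta(microseconds=microseconds)

def decode_transition(value):
    # Low byte selects the core type; high bits are additive qualifiers.
    core = CORE_TYPES[value & 0xFF] if (value & 0xFF) < len(CORE_TYPES) else None
    quals = [name for mask, name in QUALIFIERS if value & mask]
    return core, quals

print(webkit_to_utc(13064544000000000))  # 2015-01-01 00:00:00+00:00
print(decode_transition(0x10000001))     # ('TYPED', ['CHAIN_START'])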
Example #25
def get_powerlogAirdrop(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
                DATETIME(AIRDROP_TIMESTAMP + SYSTEM, 'UNIXEPOCH') AS ADJUSTED_TIMESTAMP,
                STATE,
                SUBEVENT,
                BUNDLEID AS BUNDLE_ID,
                PID,
                DATETIME(AIRDROP_TIMESTAMP, 'UNIXEPOCH') AS ORIGINAL_AIRDROP_TIMESTAMP,
                DATETIME(TIME_OFFSET_TIMESTAMP, 'UNIXEPOCH') AS OFFSET_TIMESTAMP,
                SYSTEM AS TIME_OFFSET,
                AIRDROP_ID AS "PLXPCAGENT_EVENTFORWARD_AIRDROP TABLE ID"
            FROM
                (
                SELECT
                    BUNDLEID,
                    AIRDROP_ID,
                    AIRDROP_TIMESTAMP,
                    TIME_OFFSET_TIMESTAMP,
                    MAX(TIME_OFFSET_ID) AS MAX_ID,
                    SYSTEM,
                    PID,
                    SUBEVENT,
                    STATE
                FROM
                    (
                SELECT
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.TIMESTAMP AS AIRDROP_TIMESTAMP,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.BUNDLEID,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.PID,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.SUBEVENT,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.STATE,
                    PLXPCAGENT_EVENTFORWARD_AIRDROP.ID AS "AIRDROP_ID",
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.TIMESTAMP AS TIME_OFFSET_TIMESTAMP,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.ID AS TIME_OFFSET_ID,
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET.SYSTEM,
                    BUNDLEID 
                FROM
                    PLXPCAGENT_EVENTFORWARD_AIRDROP 
                LEFT JOIN
                    PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET 
                    )
                AS AIRDROPSTATE 
                GROUP BY
                    AIRDROP_ID 
                )
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport(
                    'Powerlog Airdrop Connections Info')
                report.start_artifact_report(report_folder,
                                             'Airdrop Connections Info')
                report.add_script()
                data_headers = ('Adjusted Timestamp', 'State', 'Subevent',
                                'Bundle ID', 'PID',
                                'Original Airdrop Timestamp',
                                'Offset Timestamp', 'Time Offset',
                                'Airdrop Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Airdrop Connections Info'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'Powerlog Airdrop Connections Info'
                timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No data available in Airdrop Connections Info')

        db.close()
        return
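The ADJUSTED_TIMESTAMP above is simply the raw Unix-epoch value plus the SYSTEM offset that Powerlog records in PLSTORAGEOPERATOR_EVENTFORWARD_TIMEOFFSET to correct device clock skew. A minimal sketch of the same adjustment in Python (sample values are made up):

from datetime import datetime, timezone

def adjust_powerlog_timestamp(raw_unix_seconds, system_offset_seconds):
    # Apply the recorded time offset, then render the corrected UTC time.
    return datetime.fromtimestamp(raw_unix_seconds + system_offset_seconds,
                                  tz=timezone.utc)

print(adjust_powerlog_timestamp(1650000000, -3600))  # 2022-04-15 04:20:00+00:00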
Example #26
def get_kikMessages(files_found, report_folder, seeker):

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.sqlite'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(ZKIKMESSAGE.ZRECEIVEDTIMESTAMP +978307200,'UNIXEPOCH') AS RECEIVEDTIME,
    datetime(ZKIKMESSAGE.ZTIMESTAMP +978307200,'UNIXEPOCH') as TIMESTAMP,
    ZKIKMESSAGE.ZBODY,
    case ZKIKMESSAGE.ZTYPE
        when 1 then 'Received'
        when 2 then 'Sent'
        when 3 then 'Group Admin'
        when 4 then 'Group Message'
        else 'Unknown' end as 'Type',
    ZKIKMESSAGE.ZUSER,
    ZKIKUSER.ZDISPLAYNAME,
    ZKIKUSER.ZUSERNAME,
    ZKIKATTACHMENT.ZCONTENT
    from ZKIKMESSAGE
    left join ZKIKUSER on ZKIKMESSAGE.ZUSER = ZKIKUSER.Z_PK
    left join ZKIKATTACHMENT on ZKIKMESSAGE.Z_PK = ZKIKATTACHMENT.ZMESSAGE
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:

            attachmentName = str(row[7])
            thumb = ''

            for match in files_found:
                if attachmentName in match:
                    shutil.copy2(match, report_folder)
                    data_file_name = os.path.basename(match)
                    thumb = f'<img src="{report_folder}{data_file_name}" width="300">'

            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], thumb))

        description = 'Kik Messages'
        report = ArtifactHtmlReport('Kik Messages')
        report.start_artifact_report(report_folder, 'Kik Messages',
                                     description)
        report.add_script()
        data_headers = ('Received Time', 'Timestamp', 'Message', 'Type',
                        'User', 'Display Name', 'User Name', 'Attachment Name',
                        'Attachment')
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Attachment'])
        report.end_artifact_report()

        tsvname = 'Kik Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Kik Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Kik Messages data available')

    cursor.execute('''
    SELECT
    Z_PK,
    ZDISPLAYNAME,
    ZUSERNAME,
    ZEMAIL,
    ZJID,
    ZFIRSTNAME,
    ZLASTNAME,
    datetime(ZPPTIMESTAMP/1000,'unixepoch'),
    ZPPURL
    FROM ZKIKUSER
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8]))

        description = 'Kik Users'
        report = ArtifactHtmlReport('Kik Users')
        report.start_artifact_report(report_folder, 'Kik Users', description)
        report.add_script()
        data_headers = ('PK', 'Display Name', 'User Name', 'Email', 'JID',
                        'First Name', 'Last Name', 'Profile Pic Timestamp',
                        'Profile Pic URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Kik Users'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Kik Users'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Kik Users data available')

    db.close()
    return
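One caveat in the thumbnail handling above: the img src is built by concatenating report_folder and the file name, which produces an absolute path and relies on report_folder ending in a path separator. A minimal alternative sketch that references the copied attachment relative to the report (assuming, as the code above does, that the file is copied into report_folder itself; attachment_img_tag is an illustrative helper, not part of this module):

import html
import os

def attachment_img_tag(match, width=300):
    # Reference the copied attachment by bare file name so the report
    # folder stays portable when it is moved or zipped.
    name = html.escape(os.path.basename(match))
    return f'<img src="{name}" width="{width}">'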
Example #27
def get_knowCwebusage(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for KnowledgC Web Usage" + iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/knowledge_app_webusage.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
		DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
		DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
		ZOBJECT.ZVALUESTRING AS "APP NAME", 
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
		(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",   
		ZSTRUCTUREDMETADATA.Z_DKDIGITALHEALTHMETADATAKEY__WEBDOMAIN AS "DOMAIN",
		ZSTRUCTUREDMETADATA.Z_DKDIGITALHEALTHMETADATAKEY__WEBPAGEURL AS "URL",
		ZSOURCE.ZDEVICEID AS "DEVICE ID (HARDWARE UUID)",
		CASE ZOBJECT.ZSTARTDAYOFWEEK 
			WHEN "1" THEN "Sunday"
			WHEN "2" THEN "Monday"
			WHEN "3" THEN "Tuesday"
			WHEN "4" THEN "Wednesday"
			WHEN "5" THEN "Thursday"
			WHEN "6" THEN "Friday"
			WHEN "7" THEN "Saturday"
		END "DAY OF WEEK",
		ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
		DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
		ZOBJECT.ZUUID AS "UUID", 
		ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
	FROM
		ZOBJECT 
		LEFT JOIN
			ZSTRUCTUREDMETADATA 
			ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
		LEFT JOIN
			ZSOURCE 
			ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
	WHERE
		ZSTREAMNAME = "/app/webUsage" 
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12]))

        description = ''
        report = ArtifactHtmlReport('KnowledgeC Web Usage')
        report.start_artifact_report(report_folder, 'Web Usage', description)
        report.add_script()
        data_headers = ('Start', 'End', 'App Name', 'Usage in Seconds',
                        'Usage in Minutes', 'Domain', 'URL', 'Device ID',
                        'Day of the Week', 'GMT Offset', 'Entry Creation',
                        'UUID', 'Zobject Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'KnowledgeC Web Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'KnowledgeC Web Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No KnowledgeC Web Usage data available')

    db.close()
    return
Example #28
def get_Whatsapp(files_found, report_folder, seeker, wrap_text):

    source_file_msg = ''
    source_file_wa = ''
    whatsapp_msgstore_db = ''
    whatsapp_wa_db = ''

    for file_found in files_found:

        file_name = str(file_found)
        if file_name.endswith('msgstore.db'):
            whatsapp_msgstore_db = str(file_found)
            source_file_msg = file_found.replace(seeker.directory, '')

        if file_name.endswith('wa.db'):
            whatsapp_wa_db = str(file_found)
            source_file_wa = file_found.replace(seeker.directory, '')

    db = open_sqlite_db_readonly(whatsapp_msgstore_db)
    cursor = db.cursor()
    try:
        cursor.execute('''
        SELECT case CL.video_call when 1 then "Video Call" else "Audio Call" end as call_type, 
               CL.timestamp/1000 as start_time, 
               ((cl.timestamp/1000) + CL.duration) as end_time, 
               case CL.from_me when 0 then "Incoming" else "Outgoing" end as call_direction,
		       J1.raw_string AS from_id,
                            group_concat(J.raw_string) AS group_members
                     FROM   call_log_participant_v2 AS CLP
                            JOIN call_log AS CL
                              ON CL._id = CLP.call_log_row_id
                            JOIN jid AS J
                              ON J._id = CLP.jid_row_id
                            JOIN jid as J1
                              ON J1._id = CL.jid_row_id
                            GROUP  BY CL._id
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except Exception:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Whatsapp - Group Call Logs')
        report.start_artifact_report(report_folder,
                                     'Whatsapp - Group Call Logs')
        report.add_script()
        data_headers = ('Start Time', 'End Time', 'Call Type',
                        'Call Direction', 'From ID', 'Group Members')
        data_list = []
        for row in all_rows:
            starttime = datetime.datetime.fromtimestamp(int(
                row[1])).strftime('%Y-%m-%d %H:%M:%S')
            endtime = datetime.datetime.fromtimestamp(int(
                row[2])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append(
                (starttime, endtime, row[0], row[3], row[4], row[5]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Whatsapp - Group Call Logs'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_msg)

        tlactivity = f'Whatsapp - Group Call Logs'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Whatsapp Group Call Logs found')

    try:
        cursor.execute('''
                     SELECT CL.timestamp/1000 as start_time, 
                            case CL.video_call when 1 then "Video Call" else "Audio Call" end as call_type, 
                            ((CL.timestamp/1000) + CL.duration) as end_time, 
                            J.raw_string AS num, 
                            case CL.from_me when 0 then "Incoming" else "Outgoing" end as call_direction
                     FROM   call_log AS CL 
                            JOIN jid AS J 
                              ON J._id = CL.jid_row_id 
                     WHERE  CL._id NOT IN (SELECT DISTINCT call_log_row_id 
                                           FROM   call_log_participant_v2) 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except Exception:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Whatsapp - Single Call Logs')
        report.start_artifact_report(report_folder,
                                     'Whatsapp - Single Call Logs')
        report.add_script()
        data_headers = ('Start Time', 'Call Type', 'End Time', 'Number',
                        'Call Direction')
        data_list = []
        for row in all_rows:
            starttime = datetime.datetime.fromtimestamp(int(
                row[0])).strftime('%Y-%m-%d %H:%M:%S')
            endtime = datetime.datetime.fromtimestamp(int(
                row[2])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append((starttime, row[1], endtime, row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Whatsapp - Single Call Logs'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_msg)

        tlactivity = f'Whatsapp - Single Call Logs'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Whatsapp Single Call Log available')

    # Bind the path as a parameter rather than splicing it into the SQL string
    cursor.execute('ATTACH DATABASE ? AS wadb', (whatsapp_wa_db,))

    try:
        cursor.execute('''
                    SELECT messages.key_remote_jid  AS id, 
                           case 
                              when contact_book_w_groups.recipients is null then messages.key_remote_jid
                              else contact_book_w_groups.recipients
                           end as recipients, 
                           key_from_me              AS direction, 
                           messages.data            AS content, 
                           messages.timestamp/1000       AS send_timestamp, 
                           messages.received_timestamp/1000, 
                           case 
                              when messages.remote_resource is null then messages.key_remote_jid 
                              else messages.remote_resource
                           end AS group_sender,
                           messages.media_url       AS attachment
                    FROM   (SELECT jid, 
                                   recipients 
                            FROM   wadb.wa_contacts AS contacts 
                                   left join (SELECT gjid, 
                                                     Group_concat(CASE 
                                                                    WHEN jid == "" THEN NULL 
                                                                    ELSE jid 
                                                                  END) AS recipients 
                                              FROM   group_participants 
                                              GROUP  BY gjid) AS groups 
                                          ON contacts.jid = groups.gjid 
                            GROUP  BY jid) AS contact_book_w_groups 
                           join messages 
                             ON messages.key_remote_jid = contact_book_w_groups.jid
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except Exception:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Whatsapp - Messages')
        report.start_artifact_report(report_folder, 'Whatsapp - Messages')
        report.add_script()
        data_headers = ('Send Timestamp', 'Received Timestamp', 'Message ID',
                        'Recipients', 'Direction', 'Content', 'Group Sender',
                        'Attachment')
        data_list = []
        for row in all_rows:
            sendtime = datetime.datetime.fromtimestamp(int(
                row[4])).strftime('%Y-%m-%d %H:%M:%S')
            receivetime = datetime.datetime.fromtimestamp(int(
                row[5])).strftime('%Y-%m-%d %H:%M:%S')

            data_list.append((sendtime, receivetime, row[0], row[1], row[2],
                              row[3], row[6], row[7]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Whatsapp - Messages'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_msg)

        tlactivity = f'Whatsapp - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Whatsapp messages data available')

    db.close()

    db = open_sqlite_db_readonly(whatsapp_wa_db)
    cursor = db.cursor()
    try:
        cursor.execute('''
                     SELECT jid, 
                            CASE 
                              WHEN WC.number IS NULL THEN WC.jid 
                              WHEN WC.number == "" THEN WC.jid 
                              ELSE WC.number 
                            END number, 
                            CASE 
                              WHEN WC.given_name IS NULL 
                                   AND WC.family_name IS NULL 
                                   AND WC.display_name IS NULL THEN WC.jid 
                              WHEN WC.given_name IS NULL 
                                   AND WC.family_name IS NULL THEN WC.display_name 
                              WHEN WC.given_name IS NULL THEN WC.family_name 
                              WHEN WC.family_name IS NULL THEN WC.given_name 
                              ELSE WC.given_name 
                                   || " " 
                                   || WC.family_name 
                            END name 
                     FROM   wa_contacts AS WC
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except Exception:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Whatsapp - Contacts')
        report.start_artifact_report(report_folder, 'Whatsapp - Contacts')
        report.add_script()
        data_headers = ('Number', 'Name')
        data_list = []
        for row in all_rows:
            data_list.append((row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Whatsapp - Contacts'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_wa)

    else:
        logfunc('No Whatsapp Contacts found')

    db.close()

    for file_found in files_found:
        if ('com.whatsapp_preferences_light.xml' in file_found):
            with open(file_found, encoding='utf-8') as fd:
                xml_dict = xmltodict.parse(fd.read())
                string_dict = xml_dict.get('map', '').get('string', '')
                data = []
                for i in range(len(string_dict)):
                    if (string_dict[i]['@name'] == 'push_name'
                        ):  # User Profile Name
                        data.append(string_dict[i]['#text'])
                    if (string_dict[i]['@name'] == 'my_current_status'
                        ):  # User Current Status
                        data.append(string_dict[i]['#text'])
                    if (string_dict[i]['@name'] == 'version'
                        ):  # User current whatsapp version
                        data.append(string_dict[i]['#text'])
                    if (string_dict[i]['@name'] == 'ph'):  # User Mobile Number
                        data.append(string_dict[i]['#text'])
                    if (string_dict[i]['@name'] == 'cc'):  # User country code
                        data.append(string_dict[i]['#text'])

                if (len(data) > 0):
                    report = ArtifactHtmlReport('Whatsapp - User Profile')
                    report.start_artifact_report(report_folder,
                                                 'Whatsapp - User Profile')
                    report.add_script()
                    data_headers = ('Version', 'Name', 'User Status',
                                    'Country Code', 'Mobile Number')
                    data_list = []
                    data_list.append(
                        (data[0], data[3], data[2], data[1], data[4]))
                    report.write_artifact_data_table(data_headers,
                                                     data_list,
                                                     file_found,
                                                     html_escape=False)
                    report.end_artifact_report()

                    tsvname = "Whatsapp - User Profile"
                    tsv(report_folder, data_headers, data_list, tsvname)

                    tlactivity = "Whatsapp - User Profile"
                    timeline(report_folder, tlactivity, data_list,
                             data_headers)
                else:
                    logfunc("No Whatsapp - Profile data found")
    return
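The preference parsing above collects values in document order and then reorders them by fixed index, which breaks silently if the XML omits or reorders keys. An order-independent sketch using the same xmltodict parse (read_whatsapp_prefs is an illustrative helper; the key names are the ones checked above):

import xmltodict

def read_whatsapp_prefs(xml_path):
    # Index the <string name="...">text</string> entries by name so lookups
    # do not depend on element order in the prefs file.
    with open(xml_path, encoding='utf-8') as fd:
        strings = xmltodict.parse(fd.read()).get('map', {}).get('string', [])
    if isinstance(strings, dict):  # xmltodict yields a dict for a single entry
        strings = [strings]
    prefs = {s.get('@name'): s.get('#text') for s in strings}
    return {
        'version': prefs.get('version'),
        'name': prefs.get('push_name'),
        'status': prefs.get('my_current_status'),
        'country_code': prefs.get('cc'),
        'mobile_number': prefs.get('ph'),
    }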
Example #29
def get_knowCusage(files_found, report_folder, seeker):
	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	cursor = db.cursor()

	cursor.execute(
	"""
	SELECT
			DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
			DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
			ZOBJECT.ZVALUESTRING AS "BUNDLE ID", 
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
			(ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES",  
			ZSOURCE.ZDEVICEID AS "DEVICE ID (HARDWARE UUID)", 
			CASE ZOBJECT.ZSTARTDAYOFWEEK 
				WHEN "1" THEN "Sunday"
				WHEN "2" THEN "Monday"
				WHEN "3" THEN "Tuesday"
				WHEN "4" THEN "Wednesday"
				WHEN "5" THEN "Thursday"
				WHEN "6" THEN "Friday"
				WHEN "7" THEN "Saturday"
			END "DAY OF WEEK",
			ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
			DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION", 
			ZOBJECT.ZUUID AS "UUID",
			ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
		FROM
			ZOBJECT 
			LEFT JOIN
				ZSTRUCTUREDMETADATA 
				ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
			LEFT JOIN
				ZSOURCE 
				ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
		WHERE
			ZSTREAMNAME = "/app/usage"
	""")
	all_rows = cursor.fetchall()
	usageentries = len(all_rows)
	data_list = []
	if usageentries > 0:    
		for row in all_rows:
			data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))

		description = ''
		report = ArtifactHtmlReport('KnowledgeC App Usage')
		report.start_artifact_report(report_folder, 'KnowledgeC App Usage', description)
		report.add_script()
		data_headers = ('Start','End','Bundle ID','Usage in Seconds','Usage in Minutes','Device ID','Day of the Week','GMT Offset','Entry Creation','UUID','Zobject Table ID' )     
		report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
		report.end_artifact_report()

		tsvname = 'KnowledgeC App Usage'
		tsv(report_folder, data_headers, data_list, tsvname)
	
		tlactivity = 'KnowledgeC App Usage'
		timeline(report_folder, tlactivity, data_list, data_headers)
	else:
		logfunc('No KnowledgeC App Usage data available')
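get_knowCusage and get_knowCwebusage differ essentially only in the ZSTREAMNAME they filter on and a few selected columns. A minimal sketch of factoring out the shared query, with the stream name passed as a bound parameter (fetch_knowledgec_stream is an illustrative helper, not part of this module):

import sqlite3

def fetch_knowledgec_stream(knowledgec_path, stream_name):
    # Shared core of the /app/usage and /app/webUsage queries above.
    db = sqlite3.connect(knowledgec_path)
    try:
        cursor = db.cursor()
        cursor.execute('''
            SELECT
                DATETIME(ZOBJECT.ZSTARTDATE + 978307200, 'UNIXEPOCH'),
                DATETIME(ZOBJECT.ZENDDATE + 978307200, 'UNIXEPOCH'),
                ZOBJECT.ZVALUESTRING,
                ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE
            FROM ZOBJECT
            WHERE ZOBJECT.ZSTREAMNAME = ?
        ''', (stream_name,))
        return cursor.fetchall()
    finally:
        db.close()

# e.g. fetch_knowledgec_stream(file_found, '/app/usage')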
Example #30
def get_locationDallB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_appharvest.txt
        # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute("""
		SELECT
		DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
		BUNDLEID AS "BUNDLE ID",
		LATITUDE || ", " || LONGITUDE AS "COORDINATES",
		ALTITUDE AS "ALTITUDE",
		HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
		VERTICALACCURACY AS "VERTICAL ACCURACY",
		STATE AS "STATE",
		AGE AS "AGE",
		ROUTINEMODE AS "ROUTINE MODE",
		LOCATIONOFINTERESTTYPE AS "LOCATION OF INTEREST TYPE",
		HEX(SIG) AS "SIG (HEX)",
		LATITUDE AS "LATITUDE",
		LONGITUDE AS "LONGITUDE",
		SPEED AS "SPEED",
		COURSE AS "COURSE",
		CONFIDENCE AS "CONFIDENCE"
		FROM APPHARVEST
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15]))

            description = ''
            report = ArtifactHtmlReport('LocationD App Harvest')
            report.start_artifact_report(report_folder, 'App Harvest',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Bundle ID', 'Coordinates',
                            'Altitude', 'Horizontal Accuracy',
                            'Vertical Accuracy', 'State', 'Age',
                            'Routine Mode', 'Location of Interest Type',
                            'Sig (HEX)', 'Latitude', 'Longitude', 'Speed',
                            'Course', 'Confidence')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell App Harvest'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell App Harvest'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell App Harvest'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD App Harvest')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_cdmacelllocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	SID AS "SID",
	NID AS "NID",
	BSID AS "BSID",
	ZONEID AS "ZONEID",
	BANDCLASS AS "BANDCLASS",
	CHANNEL AS "CHANNEL",
	PNOFFSET AS "PNOFFSET",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CDMACELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13],
                 row[14], row[15], row[16], row[17]))

        description = ''
        report = ArtifactHtmlReport('LocationD CDMA Location')
        report.start_artifact_report(report_folder, 'CDMA Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'SID', 'NID',
                        'BSID', 'Zone ID', 'Band Class', 'Channel',
                        'PN Offset', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD CDMA Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD CDMA Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD CDMA Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD CDMA Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_celllocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP", 
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	LAC AS "LAC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PSC AS "PSC",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14], row[15]))

        description = ''
        report = ArtifactHtmlReport('LocationD Cell Location')
        report.start_artifact_report(report_folder, 'Cell Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'LAC', 'CI',
                        'UARFCN', 'PSC', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD Cell Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD Cell Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD Cell Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD Cell Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_ltecelllocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT 
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PID AS "PID",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM LTECELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        description = ''
        report = ArtifactHtmlReport('LocationD LTE Location')
        report.start_artifact_report(report_folder, 'LTE Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'CI',
                        'UARFCN', 'PID', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD LTE Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD LTE Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD LTE Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD LTE Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_wifilocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MAC AS "MAC",
	CHANNEL AS "CHANNEL",
	INFOMASK AS "INFOMASK",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	SCORE AS "SCORE",
	REACH AS "REACH",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MAC', 'Channel',
                        'Infomask', 'Speed', 'Course', 'Confidence', 'Score',
                        'Reach', 'Horizontal Accuracy', 'Vertical Accuracy',
                        'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')
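Each LocationD table above ends by handing its rows to kmlgen, this project's own KML writer. For reference, a minimal sketch of producing equivalent output with the third-party simplekml package (an assumption here, not a dependency of this code); it relies on latitude and longitude being the last two columns, as in all four queries above:

import simplekml

def rows_to_kml(rows, out_path):
    # rows: tuples whose first element is a timestamp and whose last two
    # elements are latitude and longitude.
    kml = simplekml.Kml()
    for row in rows:
        timestamp, lat, lon = row[0], row[-2], row[-1]
        pnt = kml.newpoint(name=str(timestamp), coords=[(lon, lat)])
        pnt.timestamp.when = str(timestamp)
    kml.save(out_path)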