Example No. 1
def get_cacheRoutesGmap(files_found, report_folder, seeker):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        filename = os.path.basename(file_found)
        noext = os.path.splitext(filename)[0]
        noext = int(noext)
        datetime_time = datetime.datetime.fromtimestamp(noext/1000)
        datetime_time = str(datetime_time)
        with open(file_found, 'rb') as f:
            deserialized = plistlib.load(f)
            length = len(deserialized['$objects'])
            for x in range(length):
                try:
                    lat = deserialized['$objects'][x]['_coordinateLat']
                    lon = deserialized['$objects'][x]['_coordinateLong'] # lat/long pair
                    data_list.append((datetime_time, lat, lon, file_found))
                except (KeyError, TypeError):
                    # most $objects entries are not coordinate dictionaries; skip them
                    pass
            
    if len(data_list) > 0:
        description = 'Google Maps Cache Routes'
        report = ArtifactHtmlReport('Locations')
        report.start_artifact_report(report_folder, 'Google Maps Cache Routes', description)
        report.add_script()
        data_headers = ('Timestamp','Latitude','Longitude','Source File')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Google Maps Cache Routes'
        tsv(report_folder, data_headers, data_list, tsvname)
    
        kmlactivity = 'Google Maps Cache Routes'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
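Note: each route cache filename is a millisecond Unix timestamp, which the function converts with fromtimestamp(noext/1000); the file body is an NSKeyedArchiver plist whose flattened object graph sits under the '$objects' key. A minimal sketch of the filename conversion, using a hypothetical value and pinned to UTC for reproducibility (the function above uses local time):

import datetime

noext = 1609459200000  # hypothetical filename, milliseconds since the Unix epoch
ts = datetime.datetime.fromtimestamp(noext / 1000, tz=datetime.timezone.utc)
print(ts)  # 2021-01-01 00:00:00+00:00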
Example No. 2
def get_locationDwifilocB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_wifilocation.txt
    # from Sarah Edwards' APOLLO project, and is used under the terms of its license, found in Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MAC AS "MAC",
	CHANNEL AS "CHANNEL",
	INFOMASK AS "INFOMASK",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	SCORE AS "SCORE",
	REACH AS "REACH",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MAC', 'Channel',
                        'Infomask', 'Speed', 'Course', 'Confidence', 'Score',
                        'Reach', 'Horizontal Accuracy', 'Vertical Accuracy',
                        'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')

    db.close()
    return
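The magic constant 978307200 is the number of seconds between the Unix epoch (1970-01-01) and Apple's Cocoa/Core Data epoch (2001-01-01); adding it converts a Cocoa timestamp to Unix time before SQLite's 'unixepoch' conversion. A quick check:

import datetime

unix_epoch = datetime.datetime(1970, 1, 1)
cocoa_epoch = datetime.datetime(2001, 1, 1)
print(int((cocoa_epoch - unix_epoch).total_seconds()))  # 978307200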
Example No. 3
def get_locationDparkedhistorical(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("11"):
        logfunc("Unsupported version for RoutineD Parked Historical " +
                iOSversion)
        return ()

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_local_vehicle_parked.txt
    # from Sarah Edwards' APOLLO project, and is used under the terms of its license, found in Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
		   DATETIME(ZRTVEHICLEEVENTHISTORYMO.ZDATE + 978307200, 'UNIXEPOCH') AS "DATE",
		   DATETIME(ZRTVEHICLEEVENTHISTORYMO.ZLOCDATE + 978307200, 'UNIXEPOCH') AS "LOCATION DATE",
		   ZLOCLATITUDE || ", " || ZLOCLONGITUDE AS "COORDINATES",
		   ZLOCUNCERTAINTY AS "LOCATION UNCERTAINTY",
		   ZIDENTIFIER AS "IDENTIFIER",
		   ZLOCLATITUDE AS "LATITUDE",
		   ZLOCLONGITUDE AS "LONGITUDE",
		   ZRTVEHICLEEVENTHISTORYMO.Z_PK AS "ZRTLEARNEDVISITMO TABLE ID" 
		FROM
		   ZRTVEHICLEEVENTHISTORYMO
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        description = ''
        report = ArtifactHtmlReport('RoutineD Parked Vehicle Historical')
        report.start_artifact_report(report_folder,
                                     'Parked Vehicle Historical', description)
        report.add_script()
        data_headers = ('Timestamp', 'Location Date', 'Coordinates',
                        'Location Uncertainty', 'Identifier', 'Latitude',
                        'Longitude', 'Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'RoutineD Parked Vehicle Historical'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'RoutineD Parked Vehicle Historical'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'RoutineD Parked Vehicle Historical'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available in Routine Parked Vehicle Historical')

    db.close()
    return
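The iOS version gate relies on packaging's version.parse, which compares release numbers component-wise instead of lexically; a plain string comparison would misorder double-digit versions:

from packaging import version

print(version.parse("9.3.5") < version.parse("11"))   # True
print(version.parse("11.2.6") < version.parse("11"))  # False
print("9.3.5" < "11")                                 # False -- string comparison gets it wrong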
Example No. 4
def get_airGuard(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)

    cursor = db.cursor()
    cursor.execute('''
    SELECT
    device.lastSeen AS "Last Time Device Seen",
    beacon.receivedAt AS "Time (Local)",
    beacon.deviceAddress AS "Device MAC Address",
    beacon.latitude AS "Latitude",
    beacon.longitude AS "Longitude",
    beacon.rssi AS "Signal Strength (RSSI)",
    device.firstDiscovery AS "First Time Device Seen",
    device.lastNotificationSent as "Last Time User Notified"
    FROM
    beacon
    LEFT JOIN device on device.address=beacon.deviceAddress
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('AirGuard AirTag Tracker')
        report.start_artifact_report(report_folder, 'AirGuard AirTag Tracker')
        report.add_script()
        data_headers = ('Last Time Device Seen', 'Time (Local)',
                        'Device MAC Address', 'Latitude', 'Longitude',
                        'Signal Strength (RSSI)', 'First Time Device Seen',
                        'Last Time User Notified')
        data_headers_kml = ('Timestamp', 'Time (Local)', 'Device MAC Address',
                            'Latitude', 'Longitude', 'Signal Strength (RSSI)',
                            'First Time Device Seen',
                            'Last Time User Notified')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'AirGuard AirTag Tracker'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'AirGuard AirTag Tracker'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'AirGuard AirTag Tracker'
        kmlgen(report_folder, kmlactivity, data_list, data_headers_kml)

    else:
        logfunc('No AirGuard AirTag Tracker data available')

    db.close()
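open_sqlite_db_readonly is a project helper; a common way to get the same behavior from the standard library is SQLite's URI filename syntax, sketched below. This is an assumption about the helper's intent, not its actual implementation, and paths containing '?' or '#' would need escaping:

import sqlite3

def open_sqlite_db_readonly_sketch(path):
    # mode=ro guarantees the evidence database is never written to
    return sqlite3.connect(f'file:{path}?mode=ro', uri=True)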
Example No. 5
def get_tileAppDb(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)
        
        if file_found.endswith('tile-TileNetworkDB.sqlite'):
            break
            
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(ZTIMESTAMP,'unixepoch','31 years'),
    ZNAME,
    datetime(ZACTIVATION_TIMESTAMP,'unixepoch','31 years'),
    datetime(ZREGISTRATION_TIMESTAMP,'unixepoch','31 years'),
    ZALTITUDE, 
    ZLATITUDE, 
    ZLONGITUDE,
    ZID,
    ZNODE_TYPE, 
    ZSTATUS,
    ZIS_LOST,
    datetime(ZLAST_LOST_TILE_COMMUNITY_CONNECTION,'unixepoch','31 years')
    FROM ZTILENTITY_NODE INNER JOIN ZTILENTITY_TILESTATE ON ZTILENTITY_NODE.ZTILE_STATE = ZTILENTITY_TILESTATE.Z_PK
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []    
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11]))

        description = ''
        report = ArtifactHtmlReport('Tile App - Tile Information & Geolocation')
        report.start_artifact_report(report_folder, 'Tile App DB Info & Geolocation', description)
        report.add_script()
        data_headers = ('Timestamp','Tile Name','Activation Timestamp','Registration Timestamp','Altitude','Latitude','Longitude','Tile ID','Tile Type','Status','Is Lost?','Last Community Connection')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Tile App DB Info Geolocation'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Tile App DB Info Geolocation'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Tile App DB Info Geolocation'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No Tile App DB data available')

    db.close()
    return 
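The '31 years' modifier is another way of shifting a Cocoa timestamp onto the Unix epoch: 2001-01-01 is exactly 31 calendar years after 1970-01-01, so it matches the +978307200 arithmetic used elsewhere (except for source dates falling on a leap day). Demonstrated in an in-memory database:

import sqlite3

cur = sqlite3.connect(':memory:').cursor()
cur.execute("select datetime(0, 'unixepoch', '31 years'), datetime(0 + 978307200, 'unixepoch')")
print(cur.fetchone())  # ('2001-01-01 00:00:00', '2001-01-01 00:00:00')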
Example No. 6
def get_locationDparkedhistorical(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("11"):
        logfunc("Unsupported version for RoutineD Parked Historical " +
                iOSversion)
        return ()

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute("""
	select
	datetime(zrtvehicleeventhistorymo.zdate + 978307200, 'unixepoch'),
	datetime(zrtvehicleeventhistorymo.zlocdate + 978307200, 'unixepoch'),
	zlocuncertainty,
	zidentifier,
	zloclatitude,
	zloclongitude
	from
	zrtvehicleeventhistorymo
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        description = ''
        report = ArtifactHtmlReport('RoutineD Parked Vehicle Historical')
        report.start_artifact_report(report_folder,
                                     'Parked Vehicle Historical', description)
        report.add_script()
        data_headers = ('Timestamp', 'Location Date', 'Location Uncertainty',
                        'Identifier', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'RoutineD Parked Vehicle Historical'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'RoutineD Parked Vehicle Historical'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'RoutineD Parked Vehicle Historical'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available in Routine Parked Vehicle Historical')

    db.close()
    return
Example No. 7
def get_weatherAppLocations(files_found, report_folder, seeker):
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)
        with open(file_found, "rb") as plist_file:
            plist_content = plistlib.load(plist_file)

            for city in plist_content['Cities']:
                update_time = city['UpateTime']
                update_time_formatted = update_time.strftime(
                    '%Y-%m-%d %H:%M:%S')

                data_list.append((update_time_formatted, 'Added from User', '',
                                  city['Lat'], city['Lon'], city['Name'],
                                  city['Country'], city['SecondsFromGMT']))

            local_weather = plist_content['LocalWeather']
            local_update_time = local_weather['UpateTime']
            local_update_time_formatted = local_update_time.strftime(
                '%Y-%m-%d %H:%M:%S')
            last_location_update = time.strftime(
                "%Y-%m-%d %H:%M:%S",
                time.gmtime(plist_content.get('LastLocationUpdateTime')))

            data_list.append(
                (local_update_time_formatted, 'Local', last_location_update,
                 local_weather['Lat'], local_weather['Lon'],
                 local_weather['Name'], local_weather['Country'],
                 local_weather['SecondsFromGMT']))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Weather App Locations')
        report.start_artifact_report(report_folder, 'Weather App Locations')
        report.add_script()
        data_headers = ("Update Time", "Type", "Last Location Update",
                        "Latitude", "Longitude", "City", "Country",
                        "Seconds from GMT")
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Weather App Locations'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Weather App Locations'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Weather App Locations'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)

    else:
        logfunc('No data available for Weather App Locations')
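plistlib.load parses <date> values directly into datetime.datetime objects, which is why strftime can be called on them with no manual conversion; 'UpateTime' (sic) appears to be the key exactly as written in the artifact, so it must not be "corrected". A self-contained illustration with a hypothetical plist:

import io
import plistlib

buf = io.BytesIO(b'''<?xml version="1.0" encoding="UTF-8"?>
<plist version="1.0"><dict>
<key>UpateTime</key><date>2021-06-01T12:00:00Z</date>
</dict></plist>''')
content = plistlib.load(buf)
print(content['UpateTime'].strftime('%Y-%m-%d %H:%M:%S'))  # 2021-06-01 12:00:00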
Example No. 8
def get_cmh(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(images.datetaken /1000, "unixepoch") as datetaken,
    datetime(images.date_added, "unixepoch") as dateadded,
    datetime(images.date_modified, "unixepoch") as datemodified,
    images.title,
    images.bucket_display_name,
    images.latitude,
    images.longitude,
    location_view.address_text,
    location_view.uri,
    images._data,
    images.isprivate
    FROM images
    left join location_view
    on location_view._id = images._id
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Samsung CMH')
        report.start_artifact_report(report_folder, 'Geodata')
        report.add_script()
        data_headers = ('Timestamp', 'Date Added', 'Date Modified', 'Title',
                        'Bucket Name', 'Latitude', 'Longitude', 'Address',
                        'URI', 'Data Location', 'Is Private')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10]))
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Samsung CMH Geodata'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Samsung CMH Geodata'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Samsung CMH Geodata'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)

    else:
        logfunc('No Samsung CMH Geodata available')
    db.close()
    return
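In the MediaStore-style schema queried here, images.datetaken is stored in milliseconds while date_added and date_modified are in seconds, hence the /1000 on the first column only:

import sqlite3

cur = sqlite3.connect(':memory:').cursor()
cur.execute("select datetime(1609459200000 / 1000, 'unixepoch'), datetime(1609459200, 'unixepoch')")
print(cur.fetchone())  # ('2021-01-01 00:00:00', '2021-01-01 00:00:00')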
Example No. 9
def get_waze(files_found, report_folder, seeker, wrap_text):
    
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(PLACES.created_time, 'unixepoch'),
    datetime(RECENTS.access_time, 'unixepoch'),
    RECENTS.name,
    PLACES.name as "Address",
    round(PLACES.latitude*.000001,6),
    round(PLACES.longitude*.000001,6)
    from PLACES
    join RECENTS on PLACES.id = RECENTS.id
    ''')
    
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Waze - Recently Searched Locations')
        report.start_artifact_report(report_folder, 'Waze - Recently Searched Locations')
        report.add_script()
        data_headers = ('Created Timestamp','Accessed Timestamp','Location Name','Address','Latitude','Longitude')
        data_headers_kml = ('Timestamp','Accessed Timestamp','Location Name','Address','Latitude','Longitude')
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5]))
            
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Waze - Recently Searched Locations'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Waze - Recently Searched Locations'
        timeline(report_folder, tlactivity, data_list, data_headers)
        
        kmlactivity = 'Waze - Recently Searched Locations'
        kmlgen(report_folder, kmlactivity, data_list, data_headers_kml)
        
    else:
        logfunc('No Waze - Recently Searched Locations data available')
        
    db.close()
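Waze keeps coordinates as fixed-point integers (degrees multiplied by 1,000,000), which the query rescales with *.000001 and rounds to six decimal places:

raw_lat, raw_lon = 40748817, -73985428  # hypothetical stored values
print(round(raw_lat * .000001, 6), round(raw_lon * .000001, 6))  # 40.748817 -73.985428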

    
Example No. 10
def get_ooklaSpeedtestData(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('speedtest.sqlite'):
            break

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime(("ZDATE")+strftime('%s', '2001-01-01 00:00:00'), 'unixepoch') as 'Date',
        "ZEXTERNALIP" as 'External IP Address',
        "ZINTERNALIP" as 'Internal IP Address',
        "ZCARRIERNAME" as 'Carrier Name',
        "ZISP" as 'ISP',
        "ZWIFISSID" as 'Wifi SSID',
        "ZWANTYPE" as 'WAN Type',
        CASE "ZDEVICEMODEL" 
            WHEN "iPad3,1"
                THEN "iPad 3rd Gen (Wi-Fi Only)"
            WHEN "iPad3,2"
                THEN "iPad 3rd Gen (Wi-Fi/Cellular Verizon/GPS)"
            WHEN "iPad3,3"
                THEN "iPad 3rd Gen (Wi-Fi/Cellular AT&T/GPS)"
            WHEN "iPad3,4"
                THEN "iPad 4th Gen (Wi-Fi Only)"
            WHEN "iPad3,5"
                THEN "iPad 4th Gen (Wi-Fi/AT&T/GPS)"
            WHEN "iPad3,6"
                THEN "iPad 4th Gen (Wi-Fi/Verizon & Sprint/GPS)"
            WHEN "iPad6,11"
                THEN "iPad 9.7 5th Gen (Wi-Fi Only)"
            WHEN "iPad6,12"
                THEN "iPad 9.7 5th Gen (Wi-Fi/Cellular)"
            WHEN "iPad7,5"
                THEN "iPad 9.7 6th Gen (Wi-Fi Only)"
            WHEN "iPad7,6"
                THEN "iPad 9.7 6th Gen (Wi-Fi/Cellular)"
            WHEN "iPad7,11"
                THEN "iPad 10.2 7th Gen (Wi-Fi Only)"
            WHEN "iPad7,12"
                THEN "iPad 10.2 7th Gen (Wi-Fi/Cellular Global)"
            WHEN "iPad11,3"
                THEN "iPad Air 3rd Gen (Wi-Fi Only)"
            WHEN "iPad11,4"
                THEN "iPad Air 3rd Gen (Wi-Fi+Cell)"
            WHEN "iPad2,5"
                THEN "iPad mini Wi-Fi Only/1st Gen"
            WHEN "iPad2,6"
                THEN "iPad mini Wi-Fi/AT&T/GPS - 1st Gen"
            WHEN "iPad2,7"
                THEN "iPad mini Wi-Fi/VZ & Sprint/GPS - 1st Gen"
            WHEN "iPad4,4"
                THEN "iPad mini 2 (Retina/2nd Gen Wi-Fi Only)"
            WHEN "iPad4,5"
                THEN "iPad mini 2 (Retina/2nd Gen Wi-Fi/Cellular)"
            WHEN "iPad4,7"
                THEN "iPad mini 3 (Wi-Fi Only)"
            WHEN "iPad4,8"
                THEN "iPad mini 3 (Wi-Fi/Cellular)"
            WHEN "iPad5,1"
                THEN "iPad mini 4 (Wi-Fi Only)"
            WHEN "iPad5,2"
                THEN "iPad mini 4 (Wi-Fi/Cellular)"
            WHEN "iPad11,1"
                THEN "iPad mini 5th Gen (Wi-Fi Only)"
            WHEN "iPad11,2"
                THEN "iPad mini 5th Gen (Wi-Fi+Cell)"
            WHEN "iPad6,7" 
                THEN "iPad Pro 12.9 (Wi-Fi Only)"
            WHEN "iPad6,8" 
                THEN "iPad Pro 12.9 (Wi-Fi/Cellular)"
            WHEN "iPad6,3" 
                THEN "iPad Pro 9.7 (Wi-Fi Only)"
            WHEN "iPad6,4" 
                THEN "iPad Pro 9.7 (Wi-Fi/Cellular)"
            WHEN "iPad7,3" 
                THEN "iPad Pro 10.5 (Wi-Fi Only)"
            WHEN "iPad7,4" 
                THEN "iPad Pro 10.5 (Wi-Fi/Cellular)"
            WHEN "iPad7,1" 
                THEN "iPad Pro 12.9 (Wi-Fi Only - 2nd Gen)"
            WHEN "iPad7,2" 
                THEN "iPad Pro 12.9 (Wi-Fi/Cell - 2nd Gen)"
            WHEN "iPad8,9" 
                THEN "iPad Pro 11 (Wi-Fi Only - 2nd Gen)"
            WHEN "iPad8,10" 
                THEN "iPad Pro 11 (Wi-Fi/Cell - 2nd Gen)"
            WHEN "iPad8,11" 
                THEN "iPad Pro 12.9 (Wi-Fi Only - 4th Gen)"
            WHEN "iPad8,12" 
                THEN "iPad Pro 12.9 (Wi-Fi/Cell - 4th Gen)"
            WHEN "iPhone8,4" 
                THEN "iPhone SE (United States/A1662)"
            WHEN "iPhone9,1" 
                THEN "iPhone 7 (Verizon/Sprint/China/A1660)"
            WHEN "iPhone9,3" 
                THEN "iPhone 7 (AT&T/T-Mobile/A1778)"
            WHEN "iPhone9,2" 
                THEN "iPhone 7 Plus (Verizon/Sprint/China/A1661)"
            WHEN "iPhone9,4" 
                THEN "iPhone 7 Plus (AT&T/T-Mobile/A1784)"
            WHEN "iPhone10,1" 
                THEN "iPhone 8 (Verizon/Sprint/China/A1863)"
            WHEN "iPhone10,4" 
                THEN "iPhone 8 (AT&T/T-Mobile/Global/A1905)"
            WHEN "iPhone10,2" 
                THEN "iPhone 8 Plus (Verizon/Sprint/China/A1864)"
            WHEN "iPhone10,5" 
                THEN "iPhone 8 Plus (AT&T/T-Mobile/Global/A1897)"
            WHEN "iPhone10,3" 
                THEN "iPhone X (Verizon/Sprint/China/A1865)"
            WHEN "iPhone10,6" 
                THEN "iPhone X (AT&T/T-Mobile/Global/A1901)"
            WHEN "iPhone11,2" 
                THEN "iPhone Xs (A1920/A2097/A2098/A2100)"
            WHEN "iPhone11,6" 
                THEN "iPhone Xs Max (A1921/A2101/A2101/A2104)"
            WHEN "iPhone11,8" 
                THEN "iPhone XR (A1984/A2105/A2106/A2108)"
            WHEN "iPhone12,1" 
                THEN "iPhone 11 (A2111/A2221/A2223)"
            WHEN "iPhone12,3" 
                THEN "iPhone 11 Pro (A2160/A2215/A2217)"
            WHEN "iPhone12,5" 
                THEN "iPhone 11 Pro Max (A2161/A2218/A2220)"
                    ELSE "ZDEVICEMODEL"
            END 'Device Model',
        "ZLATITUDE" as 'Latitude',
        "ZLONGITUDE" as 'Longitude',
        "ZHORIZONTALACCURACY" as 'Accuracy in Meters'
        FROM ZSPEEDTESTRESULT
        
        ORDER BY "ZDATE" DESC	
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10]))

        description = ''
        report = ArtifactHtmlReport('Applications')
        report.start_artifact_report(report_folder, 'Ookla Speedtest',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'External IP Address',
                        'Internal IP Address', 'Carrier Name', 'ISP',
                        'Wifi SSID', 'WAN Type', 'Device Model',
                        'Latitude', 'Longitude', 'Accuracy in Meters')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Ookla Speedtest Data'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Ookla Speedtest Data'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Ookla Speedtest Data'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No Ookla Speedtest Application data available')

    db.close()
    return
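The long CASE expression translates Apple hardware identifiers into marketing names inside SQL; the same lookup can be done after the fetch with a plain dict, which is easier to extend as new models ship. An abbreviated sketch (only two entries shown, the rest elided):

# Abbreviated mapping; unknown identifiers fall back to the raw value,
# mirroring the ELSE branch of the CASE expression.
DEVICE_MODELS = {
    'iPad3,1': 'iPad 3rd Gen (Wi-Fi Only)',
    'iPhone12,1': 'iPhone 11 (A2111/A2221/A2223)',
}

def model_name(identifier):
    return DEVICE_MODELS.get(identifier, identifier)

print(model_name('iPhone12,1'))  # iPhone 11 (A2111/A2221/A2223)
print(model_name('iPhone99,9'))  # iPhone99,9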
Example No. 11
def get_whatsappMessages(files_found, report_folder, seeker):

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.sqlite'):
            break
    data_list = []
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(ZMESSAGEDATE+978307200, 'UNIXEPOCH'),
    ZISFROMME,
    ZPARTNERNAME,
    ZFROMJID,
    ZTOJID,
    ZWAMESSAGE.ZMEDIAITEM,
    ZTEXT,
    ZSTARRED,
    ZMESSAGETYPE,
    ZLONGITUDE,
    ZLATITUDE,
    ZMEDIALOCALPATH,
    ZXMPPTHUMBPATH
    FROM ZWAMESSAGE
    left JOIN ZWAMEDIAITEM
    on ZWAMESSAGE.Z_PK = ZWAMEDIAITEM.ZMESSAGE 
    left JOIN ZWACHATSESSION
    on ZWACHATSESSION.Z_PK = ZWAMESSAGE.ZCHATSESSION
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)

    if usageentries > 0:
        for row in all_rows:

            if row[1] == 1:
                sender = 'Local User'
                receiver = row[2]
            else:
                sender = row[2]
                receiver = 'Local User'

            if row[8] == 5: # message type 5 is a shared location, so lat/long are populated
                lon = row[9]
                lat = row[10]
            else:
                lat = ''
                lon = ''

            attfile = row[11]
            attachment = row[12]
            localpath = row[11]

            thumb = ''  # default when no matching attachment file is found
            if attachment is not None:
                for match in files_found:
                    if attachment in match:
                        shutil.copy2(match, report_folder)
                        data_file_name = os.path.basename(match)
                        thumb = f'<img src="{report_folder}/{data_file_name}"></img>'

            if attfile is not None:
                for matchf in files_found:
                    if attfile in matchf:
                        shutil.copy2(matchf, report_folder)
                        data_file_namef = os.path.basename(matchf)
                        attfile = f'<img src="{report_folder}/{data_file_namef}" width="300"></img>'
            else:
                attfile = ''

            data_list.append((
                row[0],
                sender,
                row[3],
                receiver,
                row[4],
                row[6],
                attfile,
                thumb,
                localpath,
                row[7],
                lat,
                lon,
            ))

        description = 'Whatsapp - Messages'
        report = ArtifactHtmlReport('Whatsapp - Messages')
        report.start_artifact_report(report_folder, 'Whatsapp - Messages')
        report.add_script()
        data_headers = (
            'Timestamp',
            'Sender Name',
            'From ID',
            'Receiver',
            'To ID',
            'Message',
            'Attachment File',
            'Thumb',
            'Attachment Local Path',
            'Starred?',
            'Latitude',
            'Longitude',
        )

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = 'Whatsapp - Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Whatsapp - Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Whatsapp - Messages'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)

    else:
        logfunc('No Whatsapp - Messages data available')
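Because the message table embeds <img> tags for attachments and thumbnails, write_artifact_data_table is called with html_escape=False, and each referenced file is first copied next to the report so the relative src resolves. A condensed sketch of that copy-and-embed step (embed_image is a hypothetical helper, not part of the original code):

import os
import shutil

def embed_image(match, report_folder, width=300):
    # copy the attachment beside the report, then return an <img> tag for the table
    shutil.copy2(match, report_folder)
    name = os.path.basename(match)
    return f'<img src="{report_folder}/{name}" width="{width}"></img>'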
Example No. 12
def get_airtags(files_found, report_folder, seeker):
    data_list_info = []
    data_list_safeloc = []
    data_list_location = []
    data_list_crowdloc = []
    
    for file_found in files_found:
        file_found = str(file_found)
        
        with open(file_found, 'r') as f:
            deserialized = json.load(f)
            
        for x in deserialized:
            
            name = (x['name'])
            ptype = (x['productType'].get('type'))
            maname = (x['productType']['productInformation'].get('manufacturerName'))
            pid = (x['productType']['productInformation'].get('productIdentifier'))
            vid = (x['productType']['productInformation'].get('vendorIdentifier'))
            ap = (x['productType']['productInformation'].get('antennaPower'))
            
            gid = (x['groupIdentifier'])
            
            owner = (x.get('owner'))
            batstat = (x['batteryStatus'])
            
            serial = (x['serialNumber'])
            lostmode = (x['lostModeMetadata'])
            
            cap = (x['capabilities'])
            id = (x['identifier'])
            
            asubAdministrativeArea =  (x['address'].get('subAdministrativeArea'))
            aslabel =  (x['address'].get('label'))
            astreetAddress =  (x['address'].get('streetAddress'))
            acountryCode =  (x['address'].get('countryCode'))
            astateCode =  (x['address'].get('stateCode'))
            administrativeArea =  (x['address'].get('administrativeArea'))
            astreetName =  (x['address'].get('streetName'))
            aformattedAddressLines =  (x['address'].get('formattedAddressLines'))
            amapItemFullAddress =  (x['address'].get('mapItemFullAddress'))
            afullThroroughfare =  (x['address'].get('fullThroroughfare'))
            areaOfInterest =  (x['address'].get('areaOfInterest'))
            alocality =  (x['address'].get('locality'))
            acountry =  (x['address'].get('country'))
            
            lpostype = (x['location'].get('positionType'))
            lverticalAccuracy = (x['location'].get('verticalAccuracy'))
            llong = (x['location'].get('longitude'))
            lfloor = (x['location'].get('floorLevel'))
            lisin = (x['location'].get('isInaccurate'))
            lisold = (x['location'].get('isOld'))
            lhorz = (x['location'].get('horizontalAccuracy'))
            llat = (x['location'].get('latitude'))
            ltimestamp = (x['location'].get('timeStamp'))
            ltimestamp = timestampcalc(ltimestamp)
            lalt = (x['location'].get('altitude'))
            lloc = (x['location'].get('locationFinished'))
            
            sysver = (x['systemVersion'])
            crowdloc = (x['crowdSourcedLocation'])
            crowdpostype = (x['crowdSourcedLocation'].get('positionType'))
            crowdvert = (x['crowdSourcedLocation'].get('verticalAccuracy'))
            crowdlong = (x['crowdSourcedLocation'].get('longitude'))
            crowdfloor = (x['crowdSourcedLocation'].get('floorLevel'))
            crowdisacc = (x['crowdSourcedLocation'].get('isInaccurate'))
            crowdisold = (x['crowdSourcedLocation'].get('isOld'))
            crowdhorzcc = (x['crowdSourcedLocation'].get('horizontalAccuracy'))
            crowdlat = (x['crowdSourcedLocation'].get('latitude'))
            crowdtimestamp= (x['crowdSourcedLocation'].get('timeStamp'))
            crowdtimestamp = timestampcalc(crowdtimestamp)
            crowdalt = (x['crowdSourcedLocation'].get('altitude'))
            crowdlocfin = (x['crowdSourcedLocation'].get('locationFinished'))
            
            rname = (x['role'].get('name'))
            remoji = (x['role'].get('emoji'))
            ris = (x['role'].get('identifier'))
            
            for safeloc in x.get('safeLocations', []): # empty default, the key may be absent
                sname = (safeloc.get('name'))
                stype = (safeloc.get('type'))
                sid = (safeloc.get('identifier'))
                sva = (safeloc['location'].get('verticalAccuracy'))
                sha = (safeloc['location'].get('horizontalAccuracy'))
                slong = (safeloc['location'].get('longitude'))
                slat = (safeloc['location'].get('latitude'))
                sfloor = (safeloc['location'].get('floorLevel'))
                sisina = (safeloc['location'].get('isInaccurate'))
                sisold = (safeloc['location'].get('isOld'))
                stimestamp = (safeloc['location'].get('timeStamp'))
                stimestamp = timestampcalc(stimestamp)
                salt = (safeloc['location'].get('altitude'))
                ssub = (safeloc['address'].get('subAdministrativeArea'))
                slabel = (safeloc['address'].get('label'))
                sstreet = (safeloc['address'].get('streetAddress'))
                scountry = (safeloc['address'].get('countryCode'))
                sstate = (safeloc['address'].get('stateCode'))
                sadmin = (safeloc['address'].get('administrativeArea'))
                pstreetn = (safeloc['address'].get('streetName'))
                sformated = (safeloc['address'].get('formattedAddressLines'))
                smapfull = (safeloc['address'].get('mapItemFullAddress'))
                sthro = (safeloc['address'].get('fullThroroughfare'))
                saoi = (safeloc['address'].get('areaOfInterest'))
                sloc = (safeloc['address'].get('locality'))
                scount = (safeloc['address'].get('country'))
                
                data_list_safeloc.append((stimestamp, name, serial, id, rname, remoji, ris, sname, stype, sid, sva, sha, slong, slat, sfloor, sisina, sisold, salt, ssub, slabel, sstreet, scountry, sstate, sadmin, pstreetn, sformated, smapfull, sthro, saoi, sloc, scount))	
                
            data_list_info.append((name, serial, id, rname, remoji, ris, ptype, maname, pid, vid, ap, gid, owner, batstat, lostmode, cap, sysver,))
            
            data_list_location.append((ltimestamp, name, serial, id, rname, remoji, ris, ptype, maname, pid, vid, ap, gid, owner, batstat, lostmode, cap, sysver, asubAdministrativeArea, aslabel, astreetAddress, acountryCode, astateCode, administrativeArea, astreetName, aformattedAddressLines, amapItemFullAddress, afullThroroughfare, areaOfInterest, alocality, lpostype, lverticalAccuracy, llong, lisin, lisold, lhorz, llat, lalt, lloc, acountry))
            
            data_list_crowdloc.append((crowdtimestamp, name, serial, id, rname, remoji, ris, ptype, maname, pid, vid, ap, gid, owner, batstat, lostmode, cap, sysver, crowdpostype, crowdvert, crowdlong, crowdlat, crowdalt, crowdfloor, crowdisacc, crowdisold, crowdhorzcc, crowdlocfin ))

    if data_list_safeloc:
        report = ArtifactHtmlReport('Safe Locations')
        report.start_artifact_report(report_folder, 'Safe Locations')
        report.add_script()
        data_list_safeloc_headers = ('Timestamp', 'Name', 'Serial', 'ID', 'Role Name', 'Emoji', 'Role ID', 'Name', 'Type', 'Identifier', 'Vertical Accuracy', 'Horizontal Accuracy', 'Longitude', 'Latitude', 'Floor Level', 'Is Inaccurate', 'Is Old', 'Altitude', 'Sub-Administrative Area', 'Label', 'Street Address', 'Country Code', 'State Code', 'Administrative Area', 'Street Name', 'Formatted Address Line', 'Map Item Full Address', 'Throroughfare', 'Area of Interest', 'Locality', 'Country' )
        report.write_artifact_data_table(data_list_safeloc_headers, data_list_safeloc, file_found)
        report.end_artifact_report()
        
        tsvname = 'Airtags Safe Locations'
        tsv(report_folder, data_list_safeloc_headers, data_list_safeloc, tsvname)

        tlactivity = 'Airtags Safe Locations'
        timeline(report_folder, tlactivity, data_list_safeloc, data_list_safeloc_headers)
        
        kmlactivity = 'Airtags Safe Locations'
        kmlgen(report_folder, kmlactivity, data_list_safeloc, data_list_safeloc_headers)
    else:
        logfunc('No Airtags Safe Locations data available')
            
    if data_list_location:
        report = ArtifactHtmlReport('Locations')
        report.start_artifact_report(report_folder, 'Locations')
        report.add_script()
        data_list_location_headers = ('Timestamp','Name', 'Serial', 'ID', 'Role Name', 'Emoji', 'Role ID', 'Product Type', 'Manufacturer', 'Product ID', 'Vendor ID', 'Antenna Power', 'Group ID', 'Owner', 'Battery Status', 'Lost Mode', 'Capabilities', 'System Version', 'Sub-administrative Area', 'Label', 'Street Address', 'Country Code', 'State Code', 'Administrative Area', 'Street Name', 'Formatted Address Line', 'Item Full Address', 'Throroughfare', 'Area of Interest', 'Locality', 'Type', 'Vertical Accuracy', 'Longitude', 'Is Inaccurate', 'Is Old', 'Horizontal Accuracy', 'Latitude', 'Altitude', 'Location Finished', 'Country'  )
        report.write_artifact_data_table(data_list_location_headers, data_list_location, file_found)
        report.end_artifact_report()
        
        tsvname = 'Airtags Locations'
        tsv(report_folder, data_list_location_headers, data_list_location, tsvname)

        tlactivity = 'Airtags Locations'
        timeline(report_folder, tlactivity, data_list_location, data_list_location_headers)
        
        kmlactivity = 'Airtags Locations'
        kmlgen(report_folder, kmlactivity, data_list_location, data_list_location_headers)
    else:
        logfunc('No Airtags Locations data available')
        
    if data_list_info:
        report = ArtifactHtmlReport('Airtags Info')
        report.start_artifact_report(report_folder, 'Airtags Info')
        report.add_script()
        data_list_info_headers = ('Name', 'Serial', 'ID', 'Role Name', 'Emoji', 'Role ID', 'Product Type', 'Manufacturer', 'Product ID', 'Vendor ID', 'Antenna Power', 'Group ID', 'Owner', 'Battery Status', 'Lost Mode', 'Capabilities', 'System Version' )
        report.write_artifact_data_table(data_list_info_headers, data_list_info, file_found)
        report.end_artifact_report()
        
        tsvname = 'Airtags Info'
        tsv(report_folder, data_list_info_headers, data_list_info, tsvname)

        tlactivity = 'Airtags Info'
        timeline(report_folder, tlactivity, data_list_info, data_list_info_headers)
        
    else:
        logfunc('No Airtags Info data available')
        
    
    if data_list_crowdloc:
        report = ArtifactHtmlReport('Crowdsourced Locations')
        report.start_artifact_report(report_folder, 'Crowdsourced Locations')
        report.add_script()
        data_headers_crowdloc = ('Timestamp', 'Name', 'Serial', 'ID', 'Role Name', 'Emoji', 'Role ID', 'Product Type', 'Manufacturer', 'Product ID', 'Vendor ID', 'Antenna Power', 'Group ID', 'Owner', 'Battery Status', 'Lost Mode', 'Capabilities', 'System Version', 'Position Type', 'Vertical Accuracy', 'Longitude', 'Latitude', 'Altitude', 'Floor Level', 'Is Inaccurate', 'Is Old', 'Horizontal Accuracy', 'Location Finished')
        report.write_artifact_data_table(data_headers_crowdloc, data_list_crowdloc, file_found)
        report.end_artifact_report()
        
        tsvname = 'Airtags Crowdsourced Locations'
        tsv(report_folder, data_headers_crowdloc, data_list_crowdloc, tsvname)

        tlactivity = 'Airtags Crowdsourced Locations'
        timeline(report_folder, tlactivity, data_list_crowdloc, data_headers_crowdloc)
        
        kmlactivity = 'Airtags Crowdsourced Locations'
        kmlgen(report_folder, kmlactivity, data_list_crowdloc, data_headers_crowdloc)
        
    else:
        logfunc('No Airtags Crowdsourced Locations data available')
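Items.data records do not always contain every key, so the parser leans on dict.get, which returns None (or a supplied default) instead of raising KeyError when a field is absent:

record = {'name': 'Keys'}  # hypothetical Items.data entry
print(record.get('owner'))              # None rather than a KeyError
print(record.get('safeLocations', []))  # [] keeps the for-loop safe when the key is missing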
Example No. 13
def get_routineDlocations(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("10"):
        logfunc(
            "Unsupported version for RoutineD Locations Cache.sqlite on iOS " +
            iOSversion)
    else:
        for file_found in files_found:
            file_found = str(file_found)

            if file_found.endswith('Cache.sqlite'):
                break

        db = sqlite3.connect(file_found)
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_local_learned_location_of_interest_entry.txt
        # from Sarah Edwards' APOLLO project, and is used under the terms of its license, found in Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
            DATETIME(ZTIMESTAMP + 978307200, 'UNIXEPOCH') AS "TIMESTAMP",
            ZLATITUDE || ", " || ZLONGITUDE AS "COORDINATES",
            ZALTITUDE AS "ALTITUDE",
            ZCOURSE AS "COURSE",
            ZSPEED AS "SPEED (M/S)",
            ZSPEED*2.23694 AS "SPEED (MPH)",
            ZSPEED*3.6 AS "SPEED (KMPH)",
            ZHORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
            ZVERTICALACCURACY AS "VERTICAL ACCURACY",
            ZLATITUDE AS "LATITUDE",
            ZLONGITUDE AS "LONGITUDE",
            ZRTCLLOCATIONMO.Z_PK AS "ZRTCLLOCATIONMO TABLE ID" 
            FROM
            ZRTCLLOCATIONMO
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11]))

            description = 'Granular location data (~ 1 week)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD ZRTCLLOCATIONMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Coordinates', 'Altitude', 'Course',
                            'Speed (M/S)', 'Speed (MPH)', 'Speed (KMPH)',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTCLLOCATIONMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTCLLOCATIONMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTCLLOCATIONMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTCLLOCATIONMO data available')

        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_local_learned_location_of_interest_entry.txt
        # from Sarah Edwards' APOLLO project, and is used under the terms of its license, found in Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
            DATETIME(ZDATE + 978307200, 'UNIXEPOCH') AS "TIMESTAMP",
            ZLATITUDE || ", " || ZLONGITUDE AS "COORDINATES",
            ZSOURCE AS "SOURCE",
            ZLATITUDE AS "LATITUDE",
            ZLONGITUDE AS "LONGITUDE",
            ZRTHINTMO.Z_PK AS "ZRTHINTMO TABLE ID" 
        FROM
            ZRTHINTMO 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5]))

            description = 'Semi-granular location data (~ 1 week)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD ZRTHINTMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Coordinates', 'Source', 'Latitude',
                            'Longitude', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTHINTMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTHINTMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTHINTMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTHINTMO data available')

        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_local_learned_location_of_interest_entry.txt
        # from Sarah Edwards' APOLLO project, and is used under the terms of its license, found in Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
                DATETIME(ZENTRYDATE + 978307200, 'UNIXEPOCH') AS "ENTRY TIMESTAMP",
                DATETIME(ZEXITDATE + 978307200, 'UNIXEPOCH') AS "EXIT TIMESTAMP",
                ZLOCATIONLATITUDE || ", " ||   ZLOCATIONLONGITUDE AS "COORDINATES",
                DATETIME(ZDETECTIONDATE + 978307200, 'UNIXEPOCH') AS "DETECTION TIMESTAMP",
                (ZEXITDATE-ZENTRYDATE)/60.00 AS "VISIT TIME (MINUTES)",
                ZTYPE AS "TYPE",
                ZLOCATIONLATITUDE AS "LATITUDE",
                ZLOCATIONLONGITUDE AS "LONGITUDE",
                ZLOCATIONUNCERTAINTY AS "UNCERTAINTY",
                ZDATAPOINTCOUNT AS "DATA POINT COUNT",
                Z_PK AS "ZRTVISITMO TABLE ID" 
            FROM
                ZRTVISITMO
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10]))

            description = 'Visit locations'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD ZRTVISITMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Exit Timestamp', 'Coordinates',
                            'Detection Timestamp', 'Visit Time (Minutes)',
                            'Type', 'Latitude', 'Longitude', 'Uncertainty',
                            'Data Point Count', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTVISITMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTVISITMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTVISITMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTVISITMO data available')
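ZSPEED is recorded in metres per second; the queries derive the other units inline with the standard conversion factors (1 m/s = 2.23694 mph = 3.6 km/h):

speed_ms = 10.0            # hypothetical ZSPEED value, metres per second
print(speed_ms * 2.23694)  # 22.3694 mph
print(speed_ms * 3.6)      # 36.0 km/h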
Example No. 14
def get_routineDlocations(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("10"):
        logfunc(
            "Unsupported version for RoutineD Locations Cache.sqlite on iOS " +
            iOSversion)
    else:
        for file_found in files_found:
            file_found = str(file_found)

            if file_found.endswith('Cache.sqlite'):
                break

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(ztimestamp + 978307200, 'unixepoch'),
        zaltitude,
        zcourse,
        zspeed,
        zspeed*2.23694,
        zspeed*3.6,
        zhorizontalaccuracy,
        zverticalaccuracy,
        zlatitude,
        zlongitude 
        from
        zrtcllocationmo
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9]))

            description = 'Granular location data (~ 1 week)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD ZRTCLLOCATIONMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Altitude', 'Course', 'Speed (M/S)',
                            'Speed (MPH)', 'Speed (KMPH)',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTCLLOCATIONMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTCLLOCATIONMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTCLLOCATIONMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTCLLOCATIONMO data available')

        cursor.execute('''
        select
        datetime(zdate + 978307200, 'unixepoch'),
        zsource,
        zlatitude,
        zlongitude
        from
        zrthintmo 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            description = 'Semi-granular location data (~ 1 week)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD ZRTHINTMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Source', 'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTHINTMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTHINTMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTHINTMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTHINTMO data available')

        cursor.execute('''
        select
        datetime(zentrydate + 978307200, 'unixepoch'),
        datetime(zexitdate + 978307200, 'unixepoch'),
        datetime(zdetectiondate + 978307200, 'unixepoch'),
        (zexitdate-zentrydate)/60.00,
        ztype,
        zlocationlatitude,
        zlocationlongitude,
        zlocationuncertainty
        from
        zrtvisitmo
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7]))

            description = 'Visit locations'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD ZRTVISITMO',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Exit Timestamp',
                            'Detection Timestamp', 'Visit Time (Minutes)',
                            'Type', 'Latitude', 'Longitude', 'Uncertainty')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD ZRTVISITMO'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD ZRTVISITMO'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD ZRTVISITMO'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD ZRTVISITMO data available')
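Visit duration in ZRTVISITMO comes straight from subtracting the entry timestamp from the exit timestamp; dividing by 60.00 rather than 60 forces SQLite into floating-point division so partial minutes survive:

zentrydate, zexitdate = 631152000, 631155600  # hypothetical Cocoa timestamps
print((zexitdate - zentrydate) / 60.00)       # 60.0 minutes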
Example No. 15
def get_photosMetadata(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc("Unsupported version for Photos.sqlite metadata on iOS " +
                iOSversion)
    elif (version.parse(iOSversion) >= version.parse("13")) & (
            version.parse(iOSversion) < version.parse("14")):
        file_found = str(files_found[0])
        #os.chmod(file_found, 0o0777)
        db = sqlite3.connect(file_found)
        cursor = db.cursor()

        cursor.execute("""
		SELECT
		DateTime( ZGENERICASSET.ZDATECREATED + 978307200, 'UNIXEPOCH' ) AS 'DateCreated',
				ZGENERICASSET.Z_PK AS 'GenericAsset_zpk',
				ZGENERICASSET.ZADDITIONALATTRIBUTES AS 'AddAttributes_Key',
				ZDETECTEDFACE.ZASSET AS 'DetectedFaceAsset',
			CASE
					ZGENERICASSET.ZKIND 
					WHEN 0 THEN
					'Photo' 
					WHEN 1 THEN
					'Video' 
				END AS 'Kind',
		ZADDITIONALASSETATTRIBUTES.ZEXIFTIMESTAMPSTRING AS 'EXIFtimestamp',
		DateTime( ZADDITIONALASSETATTRIBUTES.ZSCENEANALYSISTIMESTAMP + 978307200, 'UNIXEPOCH' ) AS 'SceneAnalysisTimeStamp',
				DateTime( ZGENERICASSET.ZANALYSISSTATEMODIFICATIONDATE + 978307200, 'UNIXEPOCH' ) AS 'AnalysisStateModificationDate',		
				DateTime( ZGENERICASSET.ZADDEDDATE + 978307200, 'UNIXEPOCH' ) AS 'AddDate',
				ZGENERICASSET.ZFILENAME AS 'FileName',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALFILENAME AS 'OriginalFilename',
		ZGENERICALBUM.ZTITLE AS 'AlbumTitle',
				ZADDITIONALASSETATTRIBUTES.ZCREATORBUNDLEID AS 'CreatorBundleID',
				ZADDITIONALASSETATTRIBUTES.ZEDITORBUNDLEID AS 'EditorBundleID',
				ZGENERICASSET.ZDIRECTORY AS 'Directory',
				ZGENERICASSET.ZUNIFORMTYPEIDENTIFIER AS 'UniformID',
			CASE
					ZGENERICASSET.ZSAVEDASSETTYPE 
					WHEN 0 THEN
					'Saved from other source' 
					WHEN 2 THEN
					'Photo Streams Data' 
					WHEN 3 THEN
					'Made/saved with this device' 
					WHEN 4 THEN
					'Default row' 
					WHEN 7 THEN
					'Deleted' ELSE ZGENERICASSET.ZSAVEDASSETTYPE 
				END AS 'SavedAssetType',
			CASE
				  WHEN ZGENERICASSET.ZFACEAREAPOINTS > 0 THEN 'Yes'
				  ELSE 'NA' 
				END AS 'FaceDetectedinPhoto',
				ZPERSON.ZDISPLAYNAME AS 'DisplayName',
				ZPERSON.ZFULLNAME AS 'FullName',
				ZPERSON.ZFACECOUNT AS 'FaceCount',
				ZDETECTEDFACE.ZPERSON AS 'Person',
				ZPERSON.ZCONTACTMATCHINGDICTIONARY AS 'ContactBlob',
				ZPERSON.ZPERSONUUID as 'PersonUUID',
				ZDETECTEDFACE.ZQUALITYMEASURE AS 'DetectedFaceQuality',
			CASE
					ZDETECTEDFACE.ZAGETYPE
					WHEN 1 THEN
					'Baby/Toddler'
					WHEN 2 THEN
					'Baby/Toddler'
					WHEN 3 THEN
					'Child/YoungAdult'
					WHEN 4 THEN
					'YoungAdult/Adult'
					WHEN 5 THEN
					'Adult'
					ELSE ZDETECTEDFACE.ZAGETYPE
				END AS 'AgeTypeEstimate',
			CASE
					ZDETECTEDFACE.ZGENDERTYPE
					WHEN 1 THEN
					'Male'
					WHEN 2 THEN
					'Female'
					ELSE ZDETECTEDFACE.ZGENDERTYPE
				END AS 'Gender',
			CASE
				  ZDETECTEDFACE.ZGLASSESTYPE
				  WHEN 3 THEN
				  'None'
				  WHEN 2 THEN
				  'Sun'
				  WHEN 1 THEN
				  'Eye'
				  ELSE ZDETECTEDFACE.ZGLASSESTYPE
				END AS 'GlassesType',
			CASE
				  ZDETECTEDFACE.ZFACIALHAIRTYPE
				  WHEN 1 THEN
				  'None'
				  WHEN 2 THEN
				  'Beard/Mustache'
				  WHEN 3 THEN
				  'Goatee'
				  WHEN 5 THEN
				  'Stubble'
				  ELSE ZDETECTEDFACE.ZFACIALHAIRTYPE
				END AS 'FacialHairType',
			CASE
				  ZDETECTEDFACE.ZBALDTYPE
				  WHEN 2 THEN
				  'Bald'
				  WHEN 3 THEN
				  'NotBald'
				  ELSE ZDETECTEDFACE.ZBALDTYPE
				END AS 'Baldness',  	
				ZGENERICASSET.ZORIGINALCOLORSPACE AS 'ColorSpace',
				ZGENERICASSET.Zduration AS 'Duration',
				ZGENERICASSET.Zvideocpdurationvalue AS 'VideoDuration',
			CASE
					ZGENERICASSET.ZCOMPLETE 
					WHEN 1 THEN
					'Yes' 
				END AS 'Complete',
			CASE
					ZGENERICASSET.ZVISIBILITYSTATE 
					WHEN 0 THEN
					'Visible' 
					WHEN 1 THEN
					'Photo Streams Data' 
					WHEN 2 THEN
					'Burst' ELSE ZVISIBILITYSTATE 
				END AS 'VisibilityState',
			CASE
					ZGENERICASSET.ZFAVORITE 
					WHEN 0 THEN
					'No' 
					WHEN 1 THEN
					'Yes' 
				END AS 'Favorite',
			CASE
					ZGENERICASSET.zhidden 
					WHEN 0 THEN
					'Not_Hidden' 
					WHEN 1 THEN
					'File_Hidden' ELSE ZGENERICASSET.zhidden 
				END AS 'Hidden_File',
			CASE
					ZGENERICASSET.ZTRASHEDSTATE 
					WHEN 1 THEN
					'In_Trash' 
					WHEN 0 THEN
					'Not_In_Trash' ELSE ZGENERICASSET.ZTRASHEDSTATE 
				END AS 'TrashState',
				DateTime( ZGENERICASSET.ZTRASHEDDATE + 978307200, 'UNIXEPOCH' ) AS 'FileTrashDate',
				ZADDITIONALASSETATTRIBUTES.ZVIEWCOUNT AS 'ViewCount',
				ZADDITIONALASSETATTRIBUTES.ZPLAYCOUNT AS 'PlayCount',
				ZADDITIONALASSETATTRIBUTES.ZSHARECOUNT AS 'ShareCount',
				DateTime( ZGENERICASSET.ZLASTSHAREDDATE + 978307200, 'UNIXEPOCH' ) AS 'LastSharedDate',
				DateTime( ZGENERICASSET.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH' ) AS 'FileModificationDate',
			CASE
					ZGENERICASSET.ZHASADJUSTMENTS 
					WHEN 0 THEN
					'No' 
					WHEN 1 THEN
					'Yes' 
				END AS 'Has_Adjustments',
				DateTime( ZGENERICASSET.ZADJUSTMENTTIMESTAMP + 978307200, 'UNIXEPOCH' ) AS 'AdjustmentsTimeStamp',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALFILESIZE AS 'OriginalFileSize',
				ZGENERICASSET.ZHEIGHT AS 'File_Height',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALHEIGHT AS 'OrgFileHeight',
				ZGENERICASSET.ZWIDTH AS 'File_Width',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALWIDTH AS 'OrgFileWidth',
			CASE
					ZGENERICASSET.ZORIENTATION 
					WHEN 1 THEN
					'Horizontal (left)' 
					WHEN 3 THEN
					'Horizontal (right)' 
					WHEN 6 THEN
					'Vertical (up)' 
					WHEN 8 THEN
					'Vertical (down)' ELSE ZORIENTATION 
				END AS 'Orientation',
			CASE
					ZADDITIONALASSETATTRIBUTES.ZORIGINALORIENTATION 
					WHEN 1 THEN
					'Horizontal (left)' 
					WHEN 3 THEN
					'Horizontal (right)' 
					WHEN 6 THEN
					'Vertical (up)' 
					WHEN 8 THEN
					'Vertical (down)' ELSE ZADDITIONALASSETATTRIBUTES.ZORIGINALORIENTATION 
				END AS 'Org_Orientation',
				ZADDITIONALASSETATTRIBUTES.ZTIMEZONENAME AS 'TimeZoneName',
				ZADDITIONALASSETATTRIBUTES.ZTIMEZONEOFFSET AS 'TimeZoneOffset',
				ZADDITIONALASSETATTRIBUTES.ZINFERREDTIMEZONEOFFSET AS 'InferredTimeZoneOffset',
				ZGENERICASSET.ZLOCATIONDATA AS 'FileLocationData',        
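			-- -180.0 appears to act as a "no location" sentinel for latitude/longitude in Photos.sqlite, so it is blanked below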
			CASE
					ZGENERICASSET.ZLATITUDE 
					WHEN - 180.0 THEN
					'' ELSE ZGENERICASSET.ZLATITUDE 
				END AS 'Latitude',
			CASE
					ZGENERICASSET.ZLONGITUDE 
					WHEN - 180.0 THEN
					'' ELSE ZGENERICASSET.ZLONGITUDE 
				END AS 'Longitude',
			CASE
					ZADDITIONALASSETATTRIBUTES.ZSHIFTEDLOCATIONISVALID 
					WHEN 0 THEN
					'No' 
					WHEN 1 THEN
					'Yes' 
				END AS 'ShiftedLocationValid',
			CASE
					ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATAISVALID 
					WHEN 0 THEN
					'No_Check_SceneAnalysis' 
					WHEN 1 THEN
					'Yes_Check_SceneAnalysis' 
				END AS 'ReverseLocationDataIsValid',
				ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATA AS 'OrgFileReverseLocationData',
				ZGENERICASSET.Zthumbnailindex AS 'ThumbnailIndex',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILWIDTH AS 'EmbeddedThumbnailWidth',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILHEIGHT AS 'EmbeddedThumbnailHeight',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILOFFSET AS 'EmbeddedThumbnailOffset',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILLENGTH AS 'EmbeddedThumbnailLength',
				ZGENERICASSET.ZMOMENT AS 'MomentPK',
				DateTime( ZMOMENT.ZSTARTDATE + 978307200, 'UNIXEPOCH' ) AS 'MomentStartDate',
				DateTime( ZMOMENT.Zrepresentativedate + 978307200, 'UNIXEPOCH' ) AS 'MomentRepresentativeDate',
				DateTime( ZMOMENT.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH' ) AS 'MomentModificationDate',
				DateTime( ZMOMENT.ZENDDATE + 978307200, 'UNIXEPOCH' ) AS 'MomentEndDate',
				ZMOMENT.ZTITLE AS 'MomentTitle',
			CASE
					ZMOMENT.Zapproximatelatitude 
					WHEN - 180.0 THEN
					'' ELSE ZMOMENT.Zapproximatelatitude 
				END AS 'MomentApproxLatitude',
			CASE
					ZMOMENT.Zapproximatelongitude 
					WHEN - 180.0 THEN
					'' ELSE ZMOMENT.Zapproximatelongitude 
				END AS 'MomentApproxLongitude',
				ZGENERICASSET.ZUUID AS 'UUID',
				ZGENERICASSET.ZMEDIAGROUPUUID AS 'MediaGroupUUID',
				ZGENERICASSET.ZCLOUDASSETGUID AS 'CloudAssetGUID',
				ZADDITIONALASSETATTRIBUTES.ZPUBLICGLOBALUUID AS 'PublicGlobalUUID',
				ZADDITIONALASSETATTRIBUTES.ZMASTERFINGERPRINT AS 'MasterFingerprint',
				ZADDITIONALASSETATTRIBUTES.ZADJUSTEDFINGERPRINT AS 'AdjustedFingerprint' 
			FROM
				ZGENERICASSET
				JOIN Z_PRIMARYKEY ON ZGENERICASSET.z_ent = Z_PRIMARYKEY.z_ent
				LEFT JOIN ZMOMENT ON ZGENERICASSET.ZMOMENT = ZMOMENT.Z_PK
				JOIN ZADDITIONALASSETATTRIBUTES ON ZGENERICASSET.ZADDITIONALATTRIBUTES = ZADDITIONALASSETATTRIBUTES.Z_PK
				LEFT JOIN ZDETECTEDFACE ON ZADDITIONALASSETATTRIBUTES.ZASSET = ZDETECTEDFACE.ZASSET
				LEFT JOIN ZPERSON ON ZPERSON.Z_PK = ZDETECTEDFACE.ZPERSON
		LEFT JOIN Z_26ASSETS ON ZGENERICASSET.Z_PK = Z_26ASSETS.Z_34ASSETS
		LEFT JOIN ZGENERICALBUM ON ZGENERICALBUM.Z_PK = Z_26ASSETS.Z_26ALBUMS
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        counter = 0
        if usageentries > 0:
            for row in all_rows:
                postal_address = ''
                postal_address_subadminarea = ''
                postal_address_sublocality = ''

                if row[61] is not None:
                    pathto = os.path.join(
                        report_folder,
                        'ReverseLocationData' + str(counter) + '.bplist')
                    with open(pathto, 'wb') as wf:
                        wf.write(row[61])

                    with open(pathto, 'rb') as f:
                        try:
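                            # nd (nska_deserialize) unwraps the NSKeyedArchiver
                            # plist into plain Python objects; the reverse-geocoded
                            # address fields live under its 'postalAddress' key.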
                            deserialized_plist = nd.deserialize_plist(f)
                            postal_address = deserialized_plist[
                                'postalAddress']['_formattedAddress']
                            postal_address_subadminarea = deserialized_plist[
                                'postalAddress']['_subAdministrativeArea']
                            postal_address_sublocality = deserialized_plist[
                                'postalAddress']['_subLocality']

                        except Exception:
                            logfunc(
                                'Error reading exported bplist from Asset PK ' +
                                str(row[1]))
                            deserialized_plist = None

                data_list.append(
                    (row[0], row[0], postal_address,
                     postal_address_subadminarea, postal_address_sublocality,
                     row[1], row[2], row[3], row[4], row[5], row[6], row[7],
                     row[8], row[9], row[10], row[11], row[12], row[13],
                     row[14], row[15], row[16], row[17], row[18], row[19],
                     row[20], row[21], row[22], row[23], row[24], row[25],
                     row[26], row[27], row[28], row[29], row[30], row[31],
                     row[32], row[33], row[34], row[35], row[36], row[37],
                     row[38], row[39], row[40], row[41], row[42], row[43],
                     row[44], row[45], row[46], row[47], row[48], row[49],
                     row[50], row[51], row[52], row[53], row[54], row[55],
                     row[56], row[57], row[58], row[59], row[60], row[61],
                     row[62], row[63], row[64], row[65], row[66], row[67],
                     row[68], row[69], row[70], row[71], row[72], row[73],
                     row[74], row[75], row[76], row[77], row[78], row[79],
                     row[80]))

                counter += 1

            description = ''
            report = ArtifactHtmlReport('Photos.sqlite')
            report.start_artifact_report(report_folder, 'Metadata',
                                         description)
            report.add_script()
            data_headers = (
                'Timestamp', 'Date Created', 'Postal Address',
                'Postal Subadmin Area', 'Postal Sublocality',
                'Generic Asset ZPK', 'Add Attributes Key',
                'Detected Face Asset', 'Kind', 'EXIF Timestamp',
                'Scene Analysis Timestamp', 'Analysis State Modified Date',
                'Add Date', 'Filename', 'Original Filename', 'Album Title',
                'Creator Bundle ID', 'Editor Bundle ID', 'Directory',
                'Uniform ID', 'Saved Asset Type', 'Face Detected in Photo',
                'Display Name', 'Full Name', 'Face Count', 'Person',
                'Contact Blob', 'Person UUID', 'Detected Face Quality',
                'Age Type Estimate', 'Gender', 'Glasses Type',
                'Facial Hair Type', 'Baldness', 'Color Space', 'Duration',
                'Video Duration', 'Complete', 'Visibility State', 'Favorite',
                'Hidden File?', 'Trash State', 'File Trash Date', 'View Count',
                'Play Count', 'Share Count', 'Last Shared Date',
                'File Modification Date', 'Has Adjustments?',
                'Adjustment Timestamp', 'Original File Size', 'File Height',
                'Org File Height', 'File Width', 'Org File Width',
                'Orientation', 'Org Orientation', 'Timezone Name',
                'Timezone Offset', 'Inferred Timezone Offset',
                'File Location Data', 'Latitude', 'Longitude',
                'Shifted Location Valid', 'Reverse Location Data is Valid',
                'Org File Reverse Location Data', 'Thumbnail Index',
                'Embedded Thumbnail Width', 'Embedded Thumbnail Height',
                'Embedded Thumbnail Offset', 'Embedded Thumbnail Length',
                'Moment PK', 'Moment Start Date', 'Moment Representative Date',
                'Moment Modification Date', 'Moment End Date', 'Moment Title',
                'Moment Approx Latitude', 'Moment Approx Longitude', 'UUID',
                'Media Group UUID', 'Cloud Asset GUID', 'Public Global UUID',
                'Master Fingerprint', 'Adjusted Fingerprint')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Photos-sqlite Metadata'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Photos-sqlite Metadata'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'Photos-sqlite Metadata'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No data available for Photos.sqlite metadata')

        db.close()
        return
    elif (version.parse(iOSversion) >= version.parse("14")):
        file_found = str(files_found[0])
        #os.chmod(file_found, 0o0777)
        db = sqlite3.connect(file_found)
        cursor = db.cursor()

        cursor.execute("""
		SELECT
				
				DateTime( ZASSET.ZDATECREATED + 978307200, 'UNIXEPOCH' ) AS 'DateCreated',
				ZASSET.Z_PK AS 'Asset_zpk',
				ZASSET.ZADDITIONALATTRIBUTES AS 'AddAttributes_Key',
				ZDETECTEDFACE.ZASSET AS 'DetectedFaceAsset',
			CASE
					ZASSET.ZKIND 
					WHEN 0 THEN
					'Photo' 
					WHEN 1 THEN
					'Video' 
				END AS 'Kind',
				
		ZADDITIONALASSETATTRIBUTES.ZEXIFTIMESTAMPSTRING AS 'EXIFtimestamp',
		DateTime( ZADDITIONALASSETATTRIBUTES.ZSCENEANALYSISTIMESTAMP + 978307200, 'UNIXEPOCH' ) AS 'SceneAnalysisTimeStamp',
				DateTime( ZASSET.ZANALYSISSTATEMODIFICATIONDATE + 978307200, 'UNIXEPOCH' ) AS 'AnalysisStateModificationDate',		
				DateTime( ZASSET.ZADDEDDATE + 978307200, 'UNIXEPOCH' ) AS 'AddDate',
				ZASSET.ZFILENAME AS 'FileName',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALFILENAME AS 'OriginalFilename',
		ZGENERICALBUM.ZTITLE AS 'AlbumTitle',
				ZADDITIONALASSETATTRIBUTES.ZCREATORBUNDLEID AS 'CreatorBundleID',
				ZADDITIONALASSETATTRIBUTES.ZEDITORBUNDLEID AS 'EditorBundleID',
				ZASSET.ZDIRECTORY AS 'Directory',
				ZASSET.ZUNIFORMTYPEIDENTIFIER AS 'UniformID',
			CASE
					ZASSET.ZSAVEDASSETTYPE 
					WHEN 0 THEN
					'Saved from other source' 
					WHEN 2 THEN
					'Photo Streams Data' 
					WHEN 3 THEN
					'Made/saved with this device' 
					WHEN 4 THEN
					'Default row' 
					WHEN 7 THEN
					'Deleted' ELSE ZASSET.ZSAVEDASSETTYPE 
				END AS 'SavedAssetType',
			CASE
				  WHEN ZASSET.ZFACEAREAPOINTS > 0 THEN 'Yes'
				  ELSE 'NA' 
				END AS 'FaceDetectedinPhoto',
				ZPERSON.ZDISPLAYNAME AS 'DisplayName',
				ZPERSON.ZFULLNAME AS 'FullName',
				ZPERSON.ZFACECOUNT AS 'FaceCount',
				ZDETECTEDFACE.ZPERSON AS 'Person',
				ZPERSON.ZCONTACTMATCHINGDICTIONARY AS 'ContactBlob',
				ZPERSON.ZPERSONUUID as 'PersonUUID',
				ZDETECTEDFACE.ZQUALITYMEASURE AS 'DetectedFaceQuality',
			CASE
					ZDETECTEDFACE.ZAGETYPE
					WHEN 1 THEN
					'Baby/Toddler'
					WHEN 2 THEN
					'Baby/Toddler'
					WHEN 3 THEN
					'Child/YoungAdult'
					WHEN 4 THEN
					'YoungAdult/Adult'
					WHEN 5 THEN
					'Adult'
					ELSE ZDETECTEDFACE.ZAGETYPE
				END AS 'AgeTypeEstimate',
			CASE
					ZDETECTEDFACE.ZGENDERTYPE
					WHEN 1 THEN
					'Male'
					WHEN 2 THEN
					'Female'
					ELSE ZDETECTEDFACE.ZGENDERTYPE
				END AS 'Gender',
			CASE
				  ZDETECTEDFACE.ZGLASSESTYPE
				  WHEN 3 THEN
				  'None'
				  WHEN 2 THEN
				  'Sun'
				  WHEN 1 THEN
				  'Eye'
				  ELSE ZDETECTEDFACE.ZGLASSESTYPE
				END AS 'GlassesType',
			CASE
				  ZDETECTEDFACE.ZFACIALHAIRTYPE
				  WHEN 1 THEN
				  'None'
				  WHEN 2 THEN
				  'Beard/Mustache'
				  WHEN 3 THEN
				  'Goatee'
				  WHEN 5 THEN
				  'Stubble'
				  ELSE ZDETECTEDFACE.ZFACIALHAIRTYPE
				END AS 'FacialHairType',
			CASE
				  ZDETECTEDFACE.ZBALDTYPE
				  WHEN 2 THEN
				  'Bald'
				  WHEN 3 THEN
				  'NotBald'
				  ELSE ZDETECTEDFACE.ZBALDTYPE
				END AS 'Baldness',  	
				ZASSET.ZORIGINALCOLORSPACE AS 'ColorSpace',
				ZASSET.Zduration AS 'Duration',
				ZASSET.Zvideocpdurationvalue AS 'VideoDuration',
			CASE
					ZASSET.ZCOMPLETE 
					WHEN 1 THEN
					'Yes' 
				END AS 'Complete',
			CASE
					ZASSET.ZVISIBILITYSTATE 
					WHEN 0 THEN
					'Visible' 
					WHEN 1 THEN
					'Photo Streams Data' 
					WHEN 2 THEN
					'Burst' ELSE ZVISIBILITYSTATE 
				END AS 'VisibilityState',
			CASE
					ZASSET.ZFAVORITE 
					WHEN 0 THEN
					'No' 
					WHEN 1 THEN
					'Yes' 
				END AS 'Favorite',
			CASE
					ZASSET.zhidden 
					WHEN 0 THEN
					'Not_Hidden' 
					WHEN 1 THEN
					'File_Hidden' ELSE ZASSET.zhidden 
				END AS 'Hidden_File',
			CASE
					ZASSET.ZTRASHEDSTATE 
					WHEN 1 THEN
					'In_Trash' 
					WHEN 0 THEN
					'Not_In_Trash' ELSE ZASSET.ZTRASHEDSTATE 
				END AS 'TrashState',
				DateTime( ZASSET.ZTRASHEDDATE + 978307200, 'UNIXEPOCH' ) AS 'FileTrashDate',
				ZADDITIONALASSETATTRIBUTES.ZVIEWCOUNT AS 'ViewCount',
				ZADDITIONALASSETATTRIBUTES.ZPLAYCOUNT AS 'PlayCount',
				ZADDITIONALASSETATTRIBUTES.ZSHARECOUNT AS 'ShareCount',
				DateTime( ZASSET.ZLASTSHAREDDATE + 978307200, 'UNIXEPOCH' ) AS 'LastSharedDate',
				DateTime( ZASSET.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH' ) AS 'FileModificationDate',
			CASE
					ZASSET.ZHASADJUSTMENTS 
					WHEN 0 THEN
					'No' 
					WHEN 1 THEN
					'Yes' 
				END AS 'Has_Adjustments',
				DateTime( ZASSET.ZADJUSTMENTTIMESTAMP + 978307200, 'UNIXEPOCH' ) AS 'AdjustmentsTimeStamp',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALFILESIZE AS 'OriginalFileSize',
				ZASSET.ZHEIGHT AS 'File_Height',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALHEIGHT AS 'OrgFileHeight',
				ZASSET.ZWIDTH AS 'File_Width',
				ZADDITIONALASSETATTRIBUTES.ZORIGINALWIDTH AS 'OrgFileWidth',
			CASE
					ZASSET.ZORIENTATION 
					WHEN 1 THEN
					'Horizontal (left)' 
					WHEN 3 THEN
					'Horizontal (right)' 
					WHEN 6 THEN
					'Vertical (up)' 
					WHEN 8 THEN
					'Vertical (down)' ELSE ZORIENTATION 
				END AS 'Orientation',
			CASE
					ZADDITIONALASSETATTRIBUTES.ZORIGINALORIENTATION 
					WHEN 1 THEN
					'Horizontal (left)' 
					WHEN 3 THEN
					'Horizontal (right)' 
					WHEN 6 THEN
					'Vertical (up)' 
					WHEN 8 THEN
					'Vertical (down)' ELSE ZADDITIONALASSETATTRIBUTES.ZORIGINALORIENTATION 
				END AS 'Org_Orientation',
				ZADDITIONALASSETATTRIBUTES.ZTIMEZONENAME AS 'TimeZoneName',
				ZADDITIONALASSETATTRIBUTES.ZTIMEZONEOFFSET AS 'TimeZoneOffset',
				ZADDITIONALASSETATTRIBUTES.ZINFERREDTIMEZONEOFFSET AS 'InferredTimeZoneOffset',
				ZASSET.ZLOCATIONDATA AS 'FileLocationData',        
			CASE
					ZASSET.ZLATITUDE 
					WHEN - 180.0 THEN
					'' ELSE ZASSET.ZLATITUDE 
				END AS 'Latitude',
			CASE
					ZASSET.ZLONGITUDE 
					WHEN - 180.0 THEN
					'' ELSE ZASSET.ZLONGITUDE 
				END AS 'Longitude',
			CASE
					ZADDITIONALASSETATTRIBUTES.ZSHIFTEDLOCATIONISVALID 
					WHEN 0 THEN
					'No' 
					WHEN 1 THEN
					'Yes' 
				END AS 'ShiftedLocationValid',
			CASE
					ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATAISVALID 
					WHEN 0 THEN
					'No_Check_SceneAnalysis' 
					WHEN 1 THEN
					'Yes_Check_SceneAnalysis' 
				END AS 'ReverseLocationDataIsValid',
				ZADDITIONALASSETATTRIBUTES.ZREVERSELOCATIONDATA AS 'OrgFileReverseLocationData',
				ZASSET.Zthumbnailindex AS 'ThumbnailIndex',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILWIDTH AS 'EmbeddedThumbnailWidth',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILHEIGHT AS 'EmbeddedThumbnailHeight',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILOFFSET AS 'EmbeddedThumbnailOffset',
				ZADDITIONALASSETATTRIBUTES.ZEMBEDDEDTHUMBNAILLENGTH AS 'EmbeddedThumbnailLength',
				ZASSET.ZMOMENT AS 'MomentPK',
		ZMOMENT.ZTITLE AS 'MomentTitle',
				DateTime( ZMOMENT.ZSTARTDATE + 978307200, 'UNIXEPOCH' ) AS 'MomentStartDate',
				DateTime( ZMOMENT.Zrepresentativedate + 978307200, 'UNIXEPOCH' ) AS 'MomentRepresentativeDate',
				DateTime( ZMOMENT.ZMODIFICATIONDATE + 978307200, 'UNIXEPOCH' ) AS 'MomentModificationDate',
				DateTime( ZMOMENT.ZENDDATE + 978307200, 'UNIXEPOCH' ) AS 'MomentEndDate',
			CASE
					ZMOMENT.ZTRASHEDSTATE 
					WHEN 1 THEN
					'In_Trash' 
					WHEN 0 THEN
					'Not_In_Trash' ELSE ZMOMENT.ZTRASHEDSTATE 
				END AS 'MomentTrashState',
			CASE
					ZMOMENT.Zapproximatelatitude 
					WHEN - 180.0 THEN
					'' ELSE ZMOMENT.Zapproximatelatitude 
				END AS 'MomentApproxLatitude',
			CASE
					ZMOMENT.Zapproximatelongitude 
					WHEN - 180.0 THEN
					'' ELSE ZMOMENT.Zapproximatelongitude 
				END AS 'MomentApproxLongitude',
				ZASSET.ZUUID AS 'UUID',
				ZASSET.ZMEDIAGROUPUUID AS 'MediaGroupUUID',
				ZASSET.ZCLOUDASSETGUID AS 'CloudAssetGUID',
				ZADDITIONALASSETATTRIBUTES.ZPUBLICGLOBALUUID AS 'PublicGlobalUUID',
				ZADDITIONALASSETATTRIBUTES.ZMASTERFINGERPRINT AS 'MasterFingerprint',
				ZADDITIONALASSETATTRIBUTES.ZADJUSTEDFINGERPRINT AS 'AdjustedFingerprint' 
			FROM
				ZASSET
				LEFT JOIN ZMOMENT ON ZASSET.ZMOMENT = ZMOMENT.Z_PK
		JOIN ZADDITIONALASSETATTRIBUTES ON ZASSET.ZADDITIONALATTRIBUTES = ZADDITIONALASSETATTRIBUTES.Z_PK
				LEFT JOIN ZDETECTEDFACE ON ZADDITIONALASSETATTRIBUTES.ZASSET = ZDETECTEDFACE.ZASSET
				LEFT JOIN ZPERSON ON ZPERSON.Z_PK = ZDETECTEDFACE.ZPERSON
		LEFT JOIN Z_26ASSETS ON ZASSET.Z_PK = Z_26ASSETS.Z_3ASSETS
		LEFT JOIN ZGENERICALBUM ON ZGENERICALBUM.Z_PK = Z_26ASSETS.Z_26ALBUMS
		""")
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        counter = 0
        if usageentries > 0:
            for row in all_rows:
                postal_address = ''
                postal_address_subadminarea = ''
                postal_address_sublocality = ''

                if row[61] is not None:
                    pathto = os.path.join(
                        report_folder,
                        'ReverseLocationData' + str(counter) + '.bplist')
                    with open(pathto, 'wb') as wf:
                        wf.write(row[61])

                    with open(pathto, 'rb') as f:
                        try:
                            deserialized_plist = nd.deserialize_plist(f)
                            postal_address = deserialized_plist[
                                'postalAddress']['_formattedAddress']
                            postal_address_subadminarea = deserialized_plist[
                                'postalAddress']['_subAdministrativeArea']
                            postal_address_sublocality = deserialized_plist[
                                'postalAddress']['_subLocality']

                        except Exception:
                            logfunc(
                                'Error reading exported bplist from Asset PK ' +
                                str(row[1]))
                            deserialized_plist = None

                data_list.append(
                    (row[0], row[0], postal_address,
                     postal_address_subadminarea, postal_address_sublocality,
                     row[1], row[2], row[3], row[4], row[5], row[6], row[7],
                     row[8], row[9], row[10], row[11], row[12], row[13],
                     row[14], row[15], row[16], row[17], row[18], row[19],
                     row[20], row[21], row[22], row[23], row[24], row[25],
                     row[26], row[27], row[28], row[29], row[30], row[31],
                     row[32], row[33], row[34], row[35], row[36], row[37],
                     row[38], row[39], row[40], row[41], row[42], row[43],
                     row[44], row[45], row[46], row[47], row[48], row[49],
                     row[50], row[51], row[52], row[53], row[54], row[55],
                     row[56], row[57], row[58], row[59], row[60], row[61],
                     row[62], row[63], row[64], row[65], row[66], row[67],
                     row[68], row[69], row[70], row[71], row[72], row[73],
                     row[74], row[75], row[76], row[77], row[78], row[79],
                     row[80], row[81]))

                counter += 1

            description = ''
            report = ArtifactHtmlReport('Photos.sqlite')
            report.start_artifact_report(report_folder, 'Metadata',
                                         description)
            report.add_script()
            data_headers = (
                'Timestamp', 'Date Created', 'Postal Address',
                'Postal Subadmin Area', 'Postal Sublocality', 'Asset ZPK',
                'Add Attributes Key', 'Detected Face Asset', 'Kind',
                'EXIF Timestamp', 'Scene Analysis Timestamp',
                'Analysis State Modified Date', 'Add Date', 'Filename',
                'Original Filename', 'Album Title', 'Creator Bundle ID',
                'Editor Bundle ID', 'Directory', 'Uniform ID',
                'Saved Asset Type', 'Face Detected in Photo', 'Display Name',
                'Full Name', 'Face Count', 'Person', 'Contact Blob',
                'Person UUID', 'Detected Face Quality', 'Age Type Estimate',
                'Gender', 'Glasses Type', 'Facial Hair Type', 'Baldness',
                'Color Space', 'Duration', 'Video Duration', 'Complete',
                'Visibility State', 'Favorite', 'Hidden File?', 'Trash State',
                'File Trash Date', 'View Count', 'Play Count', 'Share Count',
                'Last Shared Date', 'File Modification Date',
                'Has Adjustments?', 'Adjustment Timestamp',
                'Original File Size', 'File Height', 'Org File Height',
                'File Width', 'Org File Width', 'Orientation',
                'Org Orientation', 'Timezone Name', 'Timezone Offset',
                'Inferred Timezone Offset', 'File Location Data', 'Latitude',
                'Longitude', 'Shifted Location Valid',
                'Reverse Location Data is Valid',
                'Org File Reverse Location Data', 'Thumbnail Index',
                'Embedded Thumbnail Width', 'Embedded Thumbnail Height',
                'Embedded Thumbnail Offset', 'Embedded Thumbnail Length',
                'Moment PK', 'Moment Title', 'Moment Start Date',
                'Moment Representative Date', 'Moment Modification Date',
                'Moment End Date', 'Moment Trash State',
                'Moment Approx Latitude', 'Moment Approx Longitude', 'UUID',
                'Media Group UUID', 'Cloud Asset GUID', 'Public Global UUID',
                'Master Fingerprint', 'Adjusted Fingerprint')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Photos-sqlite Metadata'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Photos-sqlite Metadata'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'Photos-sqlite Metadata'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No data available for Photos.sqlite metadata')

        db.close()
        return
    else:
        logfunc("Unsupported version for Photos.sqlite metadata on iOS " +
                iOSversion)
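The queries above keep adding 978307200 seconds when converting timestamps because Apple stores these values as Core Data (Cocoa) timestamps, counted from 2001-01-01 UTC rather than the Unix epoch. A minimal sketch of the same conversion in plain Python (the helper name is illustrative, not part of this module):

import datetime

COCOA_EPOCH_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 (UTC)

def cocoa_to_datetime(cocoa_seconds):
    # Convert a Core Data timestamp to an aware UTC datetime.
    return datetime.datetime.fromtimestamp(
        cocoa_seconds + COCOA_EPOCH_OFFSET, tz=datetime.timezone.utc)

# cocoa_to_datetime(0) -> 2001-01-01 00:00:00+00:00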
Example No. 16
def get_mapsSync(files_found, report_folder, seeker):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('MapsSync_0.0.1'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(ZHISTORYITEM.ZCREATETIME+978307200,'UNIXEPOCH','localtime') AS 'Time Created',
        datetime(ZHISTORYITEM.ZMODIFICATIONTIME+978307200,'UNIXEPOCH','localtime') AS 'Time Modified',
        ZHISTORYITEM.z_pk AS 'Item Number',
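        -- z_ent is the Core Data entity ID for each row; the values below identify the history item type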
        CASE
        when ZHISTORYITEM.z_ent = 14 then 'coordinates of search'
        when ZHISTORYITEM.z_ent = 16 then 'location search'
        when ZHISTORYITEM.z_ent = 12 then 'navigation journey'
        end AS 'Type',
        ZHISTORYITEM.ZQUERY AS 'Location Search',
        ZHISTORYITEM.ZLOCATIONDISPLAY AS 'Location City',
        ZHISTORYITEM.ZLATITUDE AS 'Latitude',
        ZHISTORYITEM.ZLONGITUDE AS 'Longitude',
        ZHISTORYITEM.ZROUTEREQUESTSTORAGE AS 'Journey BLOB',
        ZMIXINMAPITEM.ZMAPITEMSTORAGE as 'Map Item Storage BLOB'
        from ZHISTORYITEM
        left join ZMIXINMAPITEM on ZMIXINMAPITEM.Z_PK=ZHISTORYITEM.ZMAPITEM
        ''')

        # Above query courtesy of CheekyForensicsMonkey
        # https://cheeky4n6monkey.blogspot.com/2020/11/ios14-maps-history-blob-script.html

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                #print(row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7])
                directa = ''
                directb = ''
                mapitem = ''
                agg1 = ''
                if row[8] is None:
                    pass
                #pp = pprint.PrettyPrinter(indent = 1)
                #pp.pprint(message)
                else:
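                    # blackboxprotobuf decodes a protobuf blob without needing its
                    # schema, returning (message_dict, typedef); fields are keyed
                    # by their numeric tag as strings, hence the '1'/'2'/'4'
                    # lookups below.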
                    message, types = blackboxprotobuf.decode_message(row[8])

                    for x in message['1']:
                        for y in x['2']['1']['4']:
                            z = y.get('8')
                            if z is None:
                                pass
                            else:
                                if isinstance(z, dict):
                                    w = z.get('31')
                                    if w is None:
                                        pass
                                    else:
                                        three = get_recursively(w, '3')
                                        if three[1] == b'create':
                                            #print(f'Three: {three[1]}')
                                            if message['1'][1]['1'].get(
                                                    '2') is not None:
                                                for address in (
                                                        message['1'][1]['1']
                                                    ['2']['6']):
                                                    directa = directa + ' ' + (
                                                        address.decode(
                                                            'latin-1'))
                                                    #print(row[0],directa, 'directa')
                                                if agg1 == '':
                                                    agg1 = directa
                                                    directa = ''
                                                else:
                                                    agg1 = agg1 + ' <---> ' + directa

                                            else:
                                                for address in (w['1']['101']
                                                                ['2']['11']):
                                                    directa = directa + ' ' + (
                                                        address.decode(
                                                            'latin-1'))
                                                    #print(row[0], directb, 'directb')
                                                if agg1 == '':
                                                    agg1 = directa
                                                    directa = ''
                                                else:
                                                    agg1 = agg1 + ' <---> ' + directa

                if row[9] is None:
                    pass
                else:
                    message, types = blackboxprotobuf.decode_message(row[9])
                    #pp = pprint.PrettyPrinter(indent = 1)
                    #pp.pprint(message['1']['4'])#[7]['8']['31']['1']['101']['2']['11'])
                    get101 = (get_recursively(message, '101'))

                    for address in (get101[0]['2']['11']):
                        mapitem = mapitem + ' ' + (address.decode('latin-1'))

                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], agg1, mapitem))
                agg1 = ''

        if usageentries > 0:
            description = 'Disclaimer: Entries should be corroborated. Locations and searches from other linked devices might show up here. Travel should be confirmed. Medium confidence.'
            report = ArtifactHtmlReport('MapsSync')
            report.start_artifact_report(report_folder, 'MapsSync',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Modified Time', 'Item Number',
                            'Type', 'Location Search', 'Location City',
                            'Latitude', 'Longitude', 'Journey BLOB Item',
                            'Map Item Storage BLOB item')

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'MapsSync'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'MapsSync'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'MapsSync'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No MapsSync data available')
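get_mapsSync leans on a get_recursively helper that is not shown in this excerpt; it walks the nested dict/list structure produced by blackboxprotobuf and collects every value stored under a given key. A minimal sketch of what such a helper could look like, as an assumption about the real implementation:

def get_recursively(search_dict, field):
    # Walk a nested structure of dicts and lists, collecting every value
    # whose key equals `field`, in traversal order.
    fields_found = []
    for key, value in search_dict.items():
        if key == field:
            fields_found.append(value)
        elif isinstance(value, dict):
            fields_found.extend(get_recursively(value, field))
        elif isinstance(value, list):
            for item in value:
                if isinstance(item, dict):
                    fields_found.extend(get_recursively(item, field))
    return fields_found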
Example No. 17
def get_locationDappharvest(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
        return

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_appharvest.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	BUNDLEID AS "BUNDLE ID",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	ALTITUDE AS "ALTITUDE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	STATE AS "STATE",
	AGE AS "AGE",
	ROUTINEMODE AS "ROUTINE MODE",
	LOCATIONOFINTERESTTYPE AS "LOCATION OF INTEREST TYPE",
	HEX(SIG) AS "SIG (HEX)",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE"
	FROM APPHARVEST
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14], row[15]))

        description = ''
        report = ArtifactHtmlReport('LocationD App Harvest')
        report.start_artifact_report(report_folder, 'App Harvest', description)
        report.add_script()
        data_headers = ('Timestamp', 'Bundle ID', 'Coordinates', 'Altitude',
                        'Horizontal Accuracy', 'Vertical Accuracy', 'State',
                        'Age', 'Routine Mode', 'Location of Interest Type',
                        'Sig (HEX)', 'Latitude', 'Longitude', 'Speed',
                        'Course', 'Confidence')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD Cell App Harvest'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD Cell App Harvest'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD Cell App Harvest'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD App Harvest')

    db.close()
    return
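These artifacts gate on iOS version with version.parse (from the packaging library) rather than comparing version strings directly, because lexicographic string comparison misorders multi-digit versions. A quick illustration:

from packaging import version

print('9.3' > '11.4')                                # True  -- string comparison is wrong
print(version.parse('9.3') > version.parse('11.4'))  # False -- semantic comparison is right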
Example No. 18
def get_teamsSegment(files_found, report_folder, seeker):
    data_list_location = []
    data_list_motion = []
    data_list_timecheck = []
    data_list_power = []
    data_list_statechange = []

    for file_found in files_found:
        with open(file_found) as file:
            for line in file:
                serial = json.loads(line)
                timestamp = serial[0].replace('T', ' ')
                #print(serial[1])
                if serial[1] == 'location':
                    locationtimestamp = serial[2]['sourceTimestamp']
                    locationtimestamp = locationtimestamp.replace('T', ' ')
                    longitude = serial[2]['longitude']
                    latitude = serial[2]['latitude']
                    speed = serial[2]['speed']
                    altitude = serial[2]['altitude']
                    vertacc = serial[2]['verticalAccuracy']
                    horiacc = serial[2]['horizontalAccuracy']
                    data_list_location.append(
                        (locationtimestamp, longitude, latitude, speed,
                         altitude, vertacc, horiacc))

                if serial[1] == 'motion':
                    motionact = (serial[2]['activityName'])
                    data_list_motion.append((timestamp, motionact))

                if serial[1] == 'timeCheck':
                    tczone = serial[2]['timezone']
                    tcoffset = serial[2]['offset']
                    tcreason = serial[2]['reason']
                    data_list_timecheck.append(
                        (timestamp, tczone, tcoffset, tcreason))

                if serial[1] == 'power':
                    plugged = serial[2]['isPluggedIn']
                    batlvl = serial[2]['batteryLevel']
                    data_list_power.append((timestamp, plugged, batlvl))

                if serial[1] == 'stateChange':
                    agg = ' '
                    for a, b in serial[2].items():
                        agg = agg + (f'{a}: {b} ')
                    agg = agg.lstrip()
                    data_list_statechange.append((timestamp, agg))

    if len(data_list_location) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Locations')
        report.start_artifact_report(report_folder, 'Teams Locations')
        report.add_script()
        data_headers_location = ('Timestamp', 'Longitude', 'Latitude', 'Speed',
                                 'Altitude', 'Vertical Accuracy',
                                 'Horizontal Accuracy')
        report.write_artifact_data_table(data_headers_location,
                                         data_list_location, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Locations'
        tsv(report_folder, data_headers_location, data_list_location, tsvname)

        tlactivity = 'Microsoft Teams Locations'
        timeline(report_folder, tlactivity, data_list_location,
                 data_headers_location)

        kmlactivity = 'Microsoft Teams Locations'
        kmlgen(report_folder, kmlactivity, data_list_location,
               data_headers_location)
    else:
        logfunc('No Microsoft Teams Locations data')

    if len(data_list_motion) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Motion')
        report.start_artifact_report(report_folder, 'Teams Motion')
        report.add_script()
        data_headers_motion = ('Timestamp', 'Activity')
        report.write_artifact_data_table(data_headers_motion, data_list_motion,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Motion'
        tsv(report_folder, data_headers_motion, data_list_motion, tsvname)

        tlactivity = 'Microsoft Teams Motion'
        timeline(report_folder, tlactivity, data_list_motion,
                 data_headers_motion)

    else:
        logfunc('No Microsoft Teams Motion data')

    if len(data_list_timecheck) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Timezone')
        report.start_artifact_report(report_folder, 'Teams Timezone')
        report.add_script()
        data_headers_timecheck = ('Timestamp', 'Timezone', 'Timezone Offset',
                                  'Timezone reason')
        report.write_artifact_data_table(data_headers_timecheck,
                                         data_list_timecheck, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Timezone'
        tsv(report_folder, data_headers_timecheck, data_list_timecheck,
            tsvname)

        tlactivity = 'Microsoft Teams Timezone'
        timeline(report_folder, tlactivity, data_list_timecheck,
                 data_headers_timecheck)

    else:
        logfunc('No Microsoft Teams Timezone data')

    if len(data_list_power) > 0:
        report = ArtifactHtmlReport('Microsoft Teams Power Log')
        report.start_artifact_report(report_folder, 'Teams Power Log')
        report.add_script()
        data_headers_power = ('Timestamp', 'Is plugged in?', 'Battery Level')
        report.write_artifact_data_table(data_headers_power, data_list_power,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Power Log'
        tsv(report_folder, data_headers_power, data_list_power, tsvname)

        tlactivity = 'Microsoft Teams Power Log'
        timeline(report_folder, tlactivity, data_list_power,
                 data_headers_power)

    else:
        logfunc('No Microsoft Teams Power Log data')

    if len(data_list_statechange) > 0:
        report = ArtifactHtmlReport('Microsoft Teams State Change')
        report.start_artifact_report(report_folder, 'Teams State Change')
        report.add_script()
        data_headers_statechange = ('Timestamp', 'Change')
        report.write_artifact_data_table(data_headers_statechange,
                                         data_list_statechange, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams State Change'
        tsv(report_folder, data_headers_statechange, data_list_statechange,
            tsvname)

        tlactivity = 'Microsoft Teams State Change'
        timeline(report_folder, tlactivity, data_list_statechange,
                 data_headers_statechange)

    else:
        logfunc('No Microsoft Teams State Change data')
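get_teamsSegment expects each segment file to contain JSON lines, where every line is an array of [ISO 8601 timestamp, event type, payload object]. A synthetic line (illustrative values only, not taken from a real device) and how the parser reads it:

import json

line = '["2021-03-01T10:15:00", "power", {"isPluggedIn": true, "batteryLevel": 0.85}]'
serial = json.loads(line)
timestamp = serial[0].replace('T', ' ')   # '2021-03-01 10:15:00'
event_type = serial[1]                    # 'power'
payload = serial[2]                       # {'isPluggedIn': True, 'batteryLevel': 0.85}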
Example No. 19
def get_routineDCloud(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('Cloud-V2.sqlite'):
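            # Stop at the first Cloud-V2.sqlite match; file_found keeps that path.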
            break

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    if version.parse(iOSversion) >= version.parse("12"):
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_cloud_visit_entry.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT 
            DATETIME(ZRTADDRESSMO.ZCREATIONDATE + 978307200, 'unixepoch') AS "ADDRESS CREATION DATE",
            DATETIME(ZRTADDRESSMO.ZEXPIRATIONDATE + 978307200, 'unixepoch') AS "ADDRESS EXPIRATION DATE",
            ZRTADDRESSMO.ZCOUNTRY AS "COUNTRY",
            ZRTADDRESSMO.ZCOUNTRYCODE AS "COUNTRY CODE",
            ZRTADDRESSMO.ZPOSTALCODE AS "POSTAL CODE",
            ZRTADDRESSMO.ZLOCALITY AS "LOCALITY",
            ZRTADDRESSMO.ZSUBLOCALITY AS "SUBLOCALITY",
            ZRTADDRESSMO.ZTHOROUGHFARE AS "THROROUGHFARE",
            ZRTADDRESSMO.ZSUBTHOROUGHFARE AS "SUBTHOROUGHFARE",
            ZRTADDRESSMO.ZSUBADMINISTRATIVEAREA AS "SUBADMINISTRATIVE AREA",  
            ZRTADDRESSMO.ZAREASOFINTEREST AS "AREA OF INTEREST",
            ZRTADDRESSMO.ZOCEAN AS "OCEAN",
            ZRTADDRESSMO.ZINLANDWATER AS "INLAND WATER",
            ZRTADDRESSMO.ZISLAND AS "ISLAND",
            ZRTADDRESSMO.Z_PK  AS "ZRTADDRESSMO TABLE ID" 
        FROM ZRTADDRESSMO
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13], row[14]))

            description = 'Address'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD Cloud Addresses',
                                         description)
            report.add_script()
            data_headers = ('Address Creation Date', 'Address Expiration Date',
                            'Country', 'Country Code', 'Postal Code',
                            'Locality', 'Sublocality', 'Thoroughfare',
                            'Subthoroughfare', 'Subadministrative Area',
                            'Area of Interest', 'Ocean', 'Inland Water',
                            'Island', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Cloud Addresses'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Cloud Addresses'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Cloud Addresses data available')

    if version.parse(iOSversion) >= version.parse("13"):
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_cloud_visit_entry.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
      SELECT
        DATETIME(ZRTMAPITEMMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "MAP ITEM CREATION DATE",
        DATETIME(ZRTMAPITEMMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "MAP ITEM EXPIRATION DATE", 
        ZRTMAPITEMMO.ZLATITUDE || ", " || ZRTMAPITEMMO.ZLONGITUDE AS "MAP ITEM COORDINATES",
        ZRTMAPITEMMO.ZNAME AS "MAP ITEM NAME",
        ZRTADDRESSMO.ZCOUNTRY AS "COUNTRY",
        ZRTADDRESSMO.ZCOUNTRYCODE AS "COUNTRY CODE",
        ZRTADDRESSMO.ZPOSTALCODE AS "POSTAL CODE",
        ZRTADDRESSMO.ZLOCALITY AS "LOCALITY",
        ZRTADDRESSMO.ZSUBLOCALITY AS "SUBLOCALITY",
        ZRTADDRESSMO.ZTHOROUGHFARE AS "THROROUGHFARE",
        ZRTADDRESSMO.ZSUBTHOROUGHFARE AS "SUBTHOROUGHFARE",
        ZRTADDRESSMO.ZSUBADMINISTRATIVEAREA AS "SUBADMINISTRATIVE AREA",  
        ZRTADDRESSMO.ZAREASOFINTEREST AS "AREA OF INTEREST",
        ZRTADDRESSMO.ZOCEAN AS "OCEAN",
        ZRTADDRESSMO.ZINLANDWATER AS "INLAND WATER",
        ZRTADDRESSMO.ZISLAND AS "ISLAND",
        ZRTMAPITEMMO.ZLATITUDE AS "LATITUTE",
        ZRTMAPITEMMO.ZLONGITUDE AS "LONGITUDE",
        ZRTMAPITEMMO.ZUNCERTAINTY AS "UNCERTAINTY",
        ZRTMAPITEMMO.ZDISPLAYLANGUAGE AS "MAP ITEM LANGUAGE",
        ZRTMAPITEMMO.Z_PK  AS "ZRTMAPITEMMO TABLE ID" 
          FROM ZRTMAPITEMMO
          LEFT JOIN ZRTADDRESSMO ON  ZRTMAPITEMMO.Z_PK == ZRTADDRESSMO.ZMAPITEM
      ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16], row[17], row[18],
                     row[19], row[20]))

            description = 'RoutineD Cloud Map Items'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD Cloud Map Items',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Map Item Expiration Date',
                            'Map Item Coordinates', 'Map Item Name', 'Country',
                            'Country Code', 'Postal Code', 'Locality',
                            'Sublocality', 'Thoroughfare', 'Subthoroughfare',
                            'Subadministrative Area', 'Area of Interest',
                            'Ocean', 'Inland Water', 'Island', 'Latitude',
                            'Longitude', 'Uncertainty', 'Map Item Language',
                            'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Cloud Map Items'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Cloud Map Items'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD Cloud Map Items'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Map Items Cloud-V2.sqlite data available')

    if (version.parse(iOSversion) >= version.parse("12")) and (
            version.parse(iOSversion) < version.parse("13")):
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_cloud_visit_entry.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
      SELECT 
        DATETIME(ZRTMAPITEMMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "MAP ITEM CREATION DATE",
        DATETIME(ZRTMAPITEMMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "MAP ITEM EXPIRATION DATE",
        ZRTMAPITEMMO.ZLATITUDE || ", " || ZRTMAPITEMMO.ZLONGITUDE AS "MAP ITEM COORDINATES",
        ZRTMAPITEMMO.ZNAME AS "MAP ITEM NAME",
        ZRTADDRESSMO.ZCOUNTRY AS "COUNTRY",
        ZRTADDRESSMO.ZCOUNTRYCODE AS "COUNTRY CODE",
        ZRTADDRESSMO.ZPOSTALCODE AS "POSTAL CODE",
        ZRTADDRESSMO.ZLOCALITY AS "LOCALITY",
        ZRTADDRESSMO.ZSUBLOCALITY AS "SUBLOCALITY",
        ZRTADDRESSMO.ZTHOROUGHFARE AS "THROROUGHFARE",
        ZRTADDRESSMO.ZSUBTHOROUGHFARE AS "SUBTHOROUGHFARE",
        ZRTADDRESSMO.ZSUBADMINISTRATIVEAREA AS "SUBADMINISTRATIVE AREA",  
        ZRTADDRESSMO.ZAREASOFINTEREST AS "AREA OF INTEREST",
        ZRTADDRESSMO.ZOCEAN AS "OCEAN",
        ZRTADDRESSMO.ZINLANDWATER AS "INLAND WATER",
        ZRTADDRESSMO.ZISLAND AS "ISLAND",
        ZRTMAPITEMMO.ZLATITUDE AS "LATITUTE",
        ZRTMAPITEMMO.ZLONGITUDE AS "LONGITUDE",
        ZRTMAPITEMMO.ZUNCERTAINTY AS "UNCERTAINTY",
        ZRTMAPITEMMO.Z_PK  AS "ZRTMAPITEMMO TABLE ID" 
          FROM ZRTMAPITEMMO
          LEFT JOIN ZRTADDRESSMO ON  ZRTMAPITEMMO.Z_PK == ZRTADDRESSMO.ZMAPITEM      
      ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16], row[17], row[18],
                     row[19]))

            description = 'RoutineD Cloud Map Items'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD Cloud Map Items',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Map Item Expiration Date',
                            'Map Item Coordinates', 'Map Item Name', 'Country',
                            'Country Code', 'Postal Code', 'Locality',
                            'Sublocality', 'Thoroughfare', 'Subthoroughfare',
                            'Subadministrative Area', 'Area of Interest',
                            'Ocean', 'Inland Water', 'Island', 'Latitude',
                            'Longitude', 'Uncertainty', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Cloud Map Items'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Cloud Map Items'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD Cloud Map Items'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Cloud Map Items data available')

    if (version.parse(iOSversion) >= version.parse("13")):
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_cloud_visit_entry.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
      SELECT
          DATETIME(ZRTLEARNEDVISITMO.ZENTRYDATE + 978307200, 'UNIXEPOCH') AS "VISIT ENTRY",
          DATETIME(ZRTLEARNEDVISITMO.ZEXITDATE + 978307200, 'UNIXEPOCH') AS "VISIT EXIT",
          (ZEXITDATE-ZENTRYDATE)/60.00 AS "VISIT TIME (MINUTES)",
          ZRTLEARNEDVISITMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDVISITMO.ZLOCATIONLONGITUDE AS "COORDINATES",
          ZRTLEARNEDVISITMO.ZPLACE AS "PLACE ID",
          ZRTLEARNEDVISITMO.ZDATAPOINTCOUNT AS "DATA POINT COUNT",
          ZRTADDRESSMO.ZCOUNTRY AS "COUNTRY",
          ZRTADDRESSMO.ZCOUNTRYCODE AS "COUNTRY CODE",
          ZRTADDRESSMO.ZPOSTALCODE AS "POSTAL CODE",
          ZRTADDRESSMO.ZLOCALITY AS "LOCALITY",
          ZRTADDRESSMO.ZSUBLOCALITY AS "SUBLOCALITY",
          ZRTADDRESSMO.ZTHOROUGHFARE AS "THROROUGHFARE",
          ZRTADDRESSMO.ZSUBTHOROUGHFARE AS "SUBTHOROUGHFARE",
          ZRTADDRESSMO.ZSUBADMINISTRATIVEAREA AS "SUBADMINISTRATIVE AREA",  
          ZRTADDRESSMO.ZAREASOFINTEREST AS "AREA OF INTEREST",
          ZRTADDRESSMO.ZOCEAN AS "OCEAN",
          ZRTADDRESSMO.ZINLANDWATER AS "INLAND WATER",
          ZRTADDRESSMO.ZISLAND AS "ISLAND",
          ZRTLEARNEDVISITMO.ZLOCATIONUNCERTAINTY AS "LOCATION UNCERTAINTY",
          ZRTLEARNEDVISITMO.ZCONFIDENCE AS "CONFIDENCE",
          DATETIME(ZRTLEARNEDVISITMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "VISIT CREATION",
          DATETIME(ZRTLEARNEDVISITMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "VISIT EXPIRATION",
          ZRTDEVICEMO.ZDEVICECLASS AS "DEVICE CLASS",
          ZRTDEVICEMO.ZDEVICEMODEL AS "DEVICE MODEL",
          ZRTDEVICEMO.ZDEVICENAME AS "DEVICE NAME",
          DATETIME(ZRTLEARNEDPLACEMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "LEARNED PLACE CREATION",
          DATETIME(ZRTLEARNEDPLACEMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "LEARNED PLACE EXPIRATION",
          DATETIME(ZRTADDRESSMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "ADDRESS CREATION",   
          ZRTLEARNEDVISITMO.ZLOCATIONLATITUDE AS "LATITUDE",
          ZRTLEARNEDVISITMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
          ZRTMAPITEMMO.ZLATITUDE || ", " || ZRTMAPITEMMO.ZLONGITUDE AS "MAP ITEM COORDINATES",
          DATETIME(ZRTMAPITEMMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "MAP ITEM CREATION DATE",
          DATETIME(ZRTMAPITEMMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "MAP ITEM EXPIRATION DATE",
          ZRTMAPITEMMO.ZLATITUDE AS "MAP ITEM LATITUTE",
          ZRTMAPITEMMO.ZLONGITUDE AS "MAP ITEM LONGITUDE",
          ZRTMAPITEMMO.ZUNCERTAINTY AS "UNCERTAINTY",
          ZRTMAPITEMMO.ZDISPLAYLANGUAGE AS "MAP ITEM LANGUAGE",
          ZRTMAPITEMMO.ZNAME AS "MAP ITEM NAME",
          ZRTLEARNEDVISITMO.Z_PK AS "ZRTLEARNEDVISITMO TABLE ID" 
        FROM
          ZRTLEARNEDVISITMO 
          LEFT JOIN
            ZRTDEVICEMO ON ZRTLEARNEDVISITMO.ZDEVICE = ZRTDEVICEMO.Z_PK 
          LEFT JOIN
            ZRTLEARNEDPLACEMO ON ZRTLEARNEDPLACEMO.Z_PK = ZRTLEARNEDVISITMO.ZPLACE
          LEFT JOIN
            ZRTADDRESSMO ON ZRTADDRESSMO.ZMAPITEM = ZRTLEARNEDPLACEMO.ZMAPITEM
          LEFT JOIN
            ZRTMAPITEMMO ON ZRTMAPITEMMO.Z_PK = ZRTLEARNEDPLACEMO.ZMAPITEM
            
      ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16], row[17], row[18],
                     row[19], row[20], row[21], row[22], row[23], row[24],
                     row[25], row[26], row[27], row[28], row[29], row[30],
                     row[31], row[32], row[33], row[34], row[35], row[36],
                     row[37], row[38]))

            description = 'Significant Locations - Visit Entry & Exit (Historical)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD Cloud Visit Entry Exit',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Visit Exit', 'Visit Time (Minutes)',
                            'Coordinates', 'Place ID', 'Data Point Count',
                            'Country', 'Country Code', 'Postal Code',
                            'Locality', 'Sublocality', 'Thoroughfare',
                            'Subthoroughfare', 'Subadministrative Area',
                            'Area of Interest', 'Ocean', 'Inland Water',
                            'Island', 'Location Uncertainty', 'Confidence',
                            'Visit Creation', 'Visit Expiration',
                            'Device Class', 'Device Model', 'Device Name',
                            'Learned Place Creation',
                            'Learned Place Expiration', 'Address Creation',
                            'Latitude', 'Longitude', 'Map Item Coordinates',
                            'Map Item Creation Date',
                            'Map Item Expiration Date', 'Map Item Latitude',
                            'Map Item Longitude', 'Uncertainty',
                            'Map Item Language', 'Map Item Name', 'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Cloud Visit Entry Exit'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Cloud Visit Entry Exit'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD Cloud Visit Entry Exit'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc(
                'No RoutineD Significant Locations - Visit Entry & Exit (Historical)'
            )
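The queries in this example (and in the LocationD/RoutineD examples below) shift timestamps by 978307200 seconds, the offset between the Unix epoch (1970-01-01) and the Core Data/Cocoa epoch (2001-01-01). A minimal standalone sketch of the same conversion in Python, handy for spot-checking values outside SQLite (cocoa_to_utc is a hypothetical helper name, not part of these modules):

import datetime

COCOA_EPOCH_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 (UTC)

def cocoa_to_utc(cocoa_seconds):
    # Mirrors the SQL idiom DATETIME(value + 978307200, 'UNIXEPOCH').
    return datetime.datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET,
                                           tz=datetime.timezone.utc)

# cocoa_to_utc(0) -> 2001-01-01 00:00:00+00:00 (the Cocoa epoch itself)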
Exemplo n.º 20
0
def get_viber(files_found, report_folder, seeker):
	viber_settings = {}
	for file_found in files_found:
		file_found = str(file_found)

		iOSversion = scripts.artifacts.artGlobals.versionf
		if version.parse(iOSversion) < version.parse("14"):
			logfunc("Viber parsing has not be tested on this iOS " + iOSversion + " version. Please contact @theAtropos4n6 for resolving this issue.")

		if version.parse(iOSversion) >= version.parse("14"):
			if file_found.endswith('Settings.data'):
				db = open_sqlite_db_readonly(file_found)
				cursor = db.cursor()
				cursor.execute('''
					SELECT
						Data.key,value
					FROM Data
					WHERE Data.key IN 
						(
						'_myUserName',
						'_currentEmail',
						'_myPhoneNumber',
						'_myCanonizedPhoneNumber',
						'_myFormattedPhoneNumber',
						'_myCountryPhoneCode',
						'_myCountryCode',
						'_myLanguageCode',
						'_wasabiLastKnownUserLocation',
						'_uid',
						'_appVersion',
						'_savedDeviceId',
						'_attemptsToDownloadBackupForRestore',
						'_backupAttemptsCount',
						'_hiddenChatsPINData',
						'_myPhotoLocalID'
						)
					UNION
					SELECT
						Data.key,
						CASE 
						WHEN value LIKE '-%' THEN value
						ELSE datetime(value,'unixepoch')
						END
					FROM Data
					WHERE Data.key IN 
						('_registrationDate',
						'_autoBackupLastRunTime',
						'_lastBackupStartDate')
					UNION
					SELECT
						Data.key,
						datetime(value,'unixepoch') -- this value is stored in the user localtime
					FROM Data
					WHERE Data.key IS '_birthdate'
					ORDER BY value
				''')
				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows:
						viber_settings[row[0]] = row[1]
						temp_list = list(row)
						if temp_list[0] == '_appVersion':
							temp_list[0] = 'Application Version'
						elif temp_list[0] == '_lastBackupStartDate':
							temp_list[0] = 'Last Backup Start Date - UTC'
						elif temp_list[0] == '_myUserName':
							temp_list[0] = 'User Name'
						elif temp_list[0] == '_currentEmail':
							temp_list[0] = 'Current Email'
						elif temp_list[0] == '_birthdate':
							temp_list[0] = "Birth Date - UTC (apply user's localtime offset)"
						elif temp_list[0] == '_registrationDate':
							temp_list[0] = 'Registration Date - UTC'
						elif temp_list[0] == '_uid':
							temp_list[0] = 'User ID'
						elif temp_list[0] == '_myPhoneNumber':
							temp_list[0] = 'Phone Number'
						elif temp_list[0] == '_myCanonizedPhoneNumber':
							temp_list[0] = 'Canonized Phone Number'
						elif temp_list[0] == '_myFormattedPhoneNumber':
							temp_list[0] = 'Formatted Phone Number'
						elif temp_list[0] == '_myCountryPhoneCode':
							temp_list[0] = 'Country Phone Code'
						elif temp_list[0] == '_myCountryCode':
							temp_list[0] = 'Country Code'
						elif temp_list[0] == '_myLanguageCode':
							temp_list[0] = 'Language Code'
						elif temp_list[0] == '_wasabiLastKnownUserLocation':
							temp_list[0] = 'Last Known User Location'
						elif temp_list[0] == '_savedDeviceId':
							temp_list[0] = 'Device ID'
						elif temp_list[0] == '_myPhotoLocalID':
							temp_list[0] = 'Profile Picture'
							try:
								if temp_list[1] is not None:
									thumb = media_to_html(temp_list[1], files_found, report_folder)
									temp_list[1] = thumb
								else:
									thumb = ''								
							except:
								pass
						elif temp_list[0] == '_attemptsToDownloadBackupForRestore':
							temp_list[0] = 'Attempts To Download Backup For Restore'
							try:
								temp_list[1] = int.from_bytes(temp_list[1], byteorder='big') # byte order needs further validation
							except Exception as err:
								logfunc(f'Viber - Settings "_attemptsToDownloadBackupForRestore" could not be extracted. The error was: {err}' )
						elif temp_list[0] == '_backupAttemptsCount':
							temp_list[0] = 'Backup Attempts Count'
							try:
								temp_list[1] = int.from_bytes(temp_list[1], byteorder='big') # byte order needs further validation
							except Exception as err:
								logfunc(f'Viber - Settings "_backupAttemptsCount" could not be extracted. The error was: {err}' )
						elif temp_list[0] == '_autoBackupLastRunTime':
							temp_list[0] = 'Auto Backup Last Run Time - UTC'
							x = str(temp_list[1])
							if x.startswith("-"):
								temp_list[1] = 'Not Applied'
						elif temp_list[0] == '_lastBackupStartDate':
							x = str(temp_list[1])
							if x.startswith("-"):
								temp_list[1] = 'Not Applied'
						elif temp_list[0] == '_hiddenChatsPINData':
							temp_list[0] = 'Hidden Chats PIN Data'
						row = tuple(temp_list)
						data_list.append((row[0], row[1]))
						

				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Settings')
					report.start_artifact_report(report_folder, 'Viber - Settings')
					report.add_script()
					data_headers = ('Setting','Value')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
					report.end_artifact_report()

					tsvname = 'Viber - Settings'
					tsv(report_folder, data_headers, data_list, tsvname)
		
				db.close()

			elif file_found.endswith('Contacts.data'):

				db = open_sqlite_db_readonly(file_found)
				cursor = db.cursor()

				cursor.execute('''
					SELECT
						ZABCONTACT.ZMAINNAME AS 'Main Name',
						ZABCONTACT.ZPREFIXNAME AS 'Prefix Name',
						ZABCONTACT.ZSUFFIXNAME AS 'Suffix Name',
						ZABCONTACTNUMBER.ZPHONE AS 'Phone Number',
						ZABCONTACTNUMBER.ZCANONIZEDPHONENUM AS 'Canonized Phone Number',
						ZABCONTACT.ZCONTACTID AS 'Contact ID'
						FROM ZABCONTACT
						LEFT JOIN ZABCONTACTNUMBER ON ZABCONTACT.Z_PK = ZABCONTACTNUMBER.ZCONTACT
						UNION
						SELECT
						ZABCONTACT.ZMAINNAME AS 'Main Name',
						ZABCONTACT.ZPREFIXNAME AS 'Prefix Name',
						ZABCONTACT.ZSUFFIXNAME AS 'Suffix Name',
						ZABCONTACTNUMBER.ZPHONE AS 'Phone Number',
						ZABCONTACTNUMBER.ZCANONIZEDPHONENUM AS 'Canonized Phone Number',
						ZABCONTACT.ZCONTACTID AS 'Contact ID'
					FROM ZABCONTACTNUMBER
					LEFT JOIN ZABCONTACT ON ZABCONTACT.Z_PK = ZABCONTACTNUMBER.ZCONTACT
					ORDER BY ZMAINNAME
				''')
				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows: 
						data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
						

				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Contacts')
					report.start_artifact_report(report_folder, 'Viber - Contacts')
					report.add_script()
					data_headers = ('Main Name','Prefix Name','Suffix Name','Phone Number','Canonized Phone Number','Contact ID')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
					report.end_artifact_report()


					tsvname = 'Viber - Contacts'
					tsv(report_folder, data_headers, data_list, tsvname)
					
				cursor.execute('''
					SELECT
						datetime(ZRECENT.ZDATE+ 978307200,'unixepoch') AS 'Timestamp - UTC',
						ZRECENT.ZRECENTSLINE AS 'EMPTY DUMMY COLUMN',
						CASE
								WHEN ZRECENT.ZCALLTYPE = 'missed' THEN 'Missed Audio Call'
								WHEN ZRECENT.ZCALLTYPE = 'missed_with_video' THEN 'Missed Video Call'
								WHEN ZRECENT.ZCALLTYPE = 'outgoing_viber' THEN 'Outgoing Audio Call'
								WHEN ZRECENT.ZCALLTYPE = 'outgoing_viber_with_video' THEN 'Outgoing Video Call'
								WHEN ZRECENT.ZCALLTYPE = 'incoming_with_video' THEN 'Incoming Video Call'
								WHEN ZRECENT.ZCALLTYPE = 'incoming' THEN 'Incoming Audio Call'
								ELSE ZRECENT.ZCALLTYPE
						end  AS 'Call Type',
						ZRECENT.ZDURATION AS 'Duration'
					FROM ZRECENT
					WHERE ZRECENT.ZCALLLOGMESSAGE IS NULL AND ZRECENT.ZRECENTSLINE IS NULL
				''')

				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows: 
						temp_list = list(row)
						try:
							if 'Outgoing' in temp_list[2]:
								temp_list[1] = str(viber_settings['_myUserName']) + ',' + str(viber_settings['_myPhoneNumber'])
						except TypeError:
							pass
						row = tuple(temp_list)
						data_list.append((row[0], row[1], row[2], row[3]))



				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Call Remnants')
					report.start_artifact_report(report_folder, 'Viber - Call Remnants')
					report.add_script()
					data_headers = ('Timestamp - UTC','Caller','Call Type','Duration')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
					report.end_artifact_report()


				cursor.execute('''
					SELECT 	
							CHAT_MEMBER.ZDISPLAYFULLNAME AS 'Sender (Display Full Name)',
							CHAT_MEMBER.ZDISPLAYSHORTNAME AS 'Sender (Display Short Name)',
							CHAT_MEMBER.ZPHONE AS 'Sender (Phone)',
							CHATS.Chat_Name AS 'Chat Name',
							CHATS.CHAT_MEMBERS AS 'Chat Participant(s)',
							CHATS.CHAT_PHONES 'Chat Phone(s)',
							datetime(ZVIBERMESSAGE.ZSTATEDATE+ 978307200,'unixepoch') AS 'Message Creation Date - UTC',
							datetime(ZVIBERMESSAGE.ZDATE+ 978307200,'unixepoch') AS 'Message Change State Date - UTC',
							datetime(RECENT.ZRECENTDATE+ 978307200,'unixepoch') AS 'Call Date - UTC',
							CASE
								WHEN ZCALLTYPE = 'missed' THEN 'Missed Audio Call'
								WHEN ZCALLTYPE = 'missed_with_video' THEN 'Missed Video Call'
								WHEN ZCALLTYPE = 'outgoing_viber' THEN 'Outgoing Audio Call'
								WHEN ZCALLTYPE = 'outgoing_viber_with_video' THEN 'Outgoing Video Call'
								WHEN ZCALLTYPE = 'incoming_with_video' THEN 'Incoming Video Call'
								WHEN ZCALLTYPE = 'incoming' THEN 'Incoming Audio Call'
								ELSE ZCALLTYPE
							end  AS 'Call Type',
							CASE
								WHEN ZVIBERMESSAGE.ZSTATE IN ('send','delivered') THEN 'Outgoing'
								WHEN ZVIBERMESSAGE.ZSTATE = 'received' THEN 'Incoming'
								ELSE ZVIBERMESSAGE.ZSTATE
							END AS 'State',
							RECENT.ZDURATION AS 'Duration',
							ZVIBERMESSAGE.ZSYSTEMTYPE 'System Type Description',
							ZVIBERMESSAGE.ZMETADATA AS 'Message Metadata',
							ZVIBERMESSAGE.ZTEXT AS 'Message Content',
							ZATTACHMENT.ZNAME AS 'Attachment Name',
							ZATTACHMENT.ZTYPE AS 'Attachment Type',
							ZATTACHMENT.ZFILESIZE AS 'Attachment Size',
							ZVIBERLOCATION.ZLATITUDE AS 'Latitude',
							ZVIBERLOCATION.ZLONGITUDE AS 'Longitude',
							CASE
								WHEN CHATS.Chat_Deleted = 1 THEN 'True'
								WHEN CHATS.Chat_Deleted = 0 THEN 'False'
								ELSE CHATS.Chat_Deleted
							END AS 'Conversation Deleted',
							CASE
								WHEN ZVIBERMESSAGE.ZBEINGDELETED = 1 THEN 'True'
								WHEN ZVIBERMESSAGE.ZBEINGDELETED = 0 THEN 'False'
								ELSE ZVIBERMESSAGE.ZBEINGDELETED
							END AS 'Message Deleted',
							CHATS.ZTIMEBOMBDURATION AS 'Conversation Time Bomb Duration',
							ZVIBERMESSAGE.ZTIMEBOMBDURATION AS 'Message Time Bomb Duration',
							datetime(ZVIBERMESSAGE.ZTIMEBOMBTIMESTAMP+ 978307200,'unixepoch') AS 'Message Time Bomb Timestamp',
							CASE 
								WHEN CHATS.Chat_Favorite= 1 THEN 'True'
								WHEN CHATS.Chat_Favorite = 0 THEN 'False'
								ELSE CHATS.Chat_Favorite
							END AS 'Conversation Marked Favorite',
							ZVIBERMESSAGE.ZLIKESCOUNT AS 'Likes Count'
					FROM
						ZVIBERMESSAGE
					LEFT JOIN
							(SELECT
								ZVIBERMESSAGE.ZCONVERSATION,
								ZCONVERSATION.ZNAME AS 'Chat_Name',
								ZCONVERSATION.ZBEINGDELETED AS 'Chat_Deleted',
								ZCONVERSATION.ZFAVORITE AS 'Chat_Favorite',
								ZCONVERSATION.ZTIMEBOMBDURATION,
								coalesce(ZVIBERMESSAGE.ZPHONENUMINDEX,ZCONVERSATION.ZINTERLOCUTOR) AS 'MEMBER_ID',
								MEMBER.ZDISPLAYFULLNAME,
								MEMBER.ZDISPLAYSHORTNAME,
								MEMBER.ZNAME AS 'Participant_Name',
								MEMBER.ZCANONIZEDPHONENUM,
								MEMBER.ZPHONE,
								group_concat(DISTINCT(MEMBER.ZDISPLAYFULLNAME)) AS 'CHAT_MEMBERS',
								group_concat(DISTINCT(MEMBER.ZPHONE)) AS 'CHAT_PHONES',
								group_concat(DISTINCT(MEMBER.ZCANONIZEDPHONENUM)) AS 'CHAT_CANONIZED_PHONES'
							FROM ZVIBERMESSAGE,ZCONVERSATION
								LEFT JOIN
										(SELECT 
													ZMEMBER.ZDISPLAYFULLNAME,
													ZMEMBER.ZDISPLAYSHORTNAME,
													ZMEMBER.ZNAME,
													ZPHONENUMBER.ZCANONIZEDPHONENUM,
													ZPHONENUMBER.ZPHONE,
													ZMEMBER.Z_PK
											FROM ZMEMBER
											LEFT JOIN ZPHONENUMBER ON ZMEMBER.Z_PK = ZPHONENUMBER.ZMEMBER
											UNION
											SELECT 
													ZMEMBER.ZDISPLAYFULLNAME,
													ZMEMBER.ZDISPLAYSHORTNAME,
													ZMEMBER.ZNAME,
													ZPHONENUMBER.ZCANONIZEDPHONENUM,
													ZPHONENUMBER.ZPHONE,
													ZMEMBER.Z_PK
											FROM ZPHONENUMBER
											LEFT JOIN ZMEMBER ON ZPHONENUMBER.ZMEMBER  = ZMEMBER.Z_PK
										) AS MEMBER ON MEMBER.Z_PK = MEMBER_ID
								LEFT JOIN ZPHONENUMBER ON MEMBER_ID = ZPHONENUMBER.ZMEMBER
							WHERE ZVIBERMESSAGE.ZCONVERSATION = ZCONVERSATION.Z_PK
							GROUP BY ZVIBERMESSAGE.ZCONVERSATION
					) CHATS ON ZVIBERMESSAGE.ZCONVERSATION = CHATS.ZCONVERSATION

					LEFT JOIN
							(SELECT 
										ZMEMBER.ZDISPLAYFULLNAME,
										ZMEMBER.ZDISPLAYSHORTNAME,
										ZMEMBER.ZNAME,
										ZPHONENUMBER.ZCANONIZEDPHONENUM,
										ZPHONENUMBER.ZPHONE,
										ZMEMBER.Z_PK
								FROM ZMEMBER
								LEFT JOIN ZPHONENUMBER ON ZMEMBER.Z_PK = ZPHONENUMBER.ZMEMBER
								UNION
								SELECT 
										ZMEMBER.ZDISPLAYFULLNAME,
										ZMEMBER.ZDISPLAYSHORTNAME,
										ZMEMBER.ZNAME,
										ZPHONENUMBER.ZCANONIZEDPHONENUM,
										ZPHONENUMBER.ZPHONE,
										ZMEMBER.Z_PK
								FROM ZPHONENUMBER
								LEFT JOIN ZMEMBER ON ZPHONENUMBER.ZMEMBER  = ZMEMBER.Z_PK
							) AS CHAT_MEMBER ON ZVIBERMESSAGE.ZPHONENUMINDEX = CHAT_MEMBER.Z_PK
					LEFT JOIN
							(SELECT
												ZRECENT.ZDURATION,
												ZRECENT.ZCALLLOGMESSAGE,
												ZRECENT.ZDATE AS 'ZRECENTDATE',
												ZRECENTSLINE.ZDATE AS 'ZRECENTSLINEDATE',
												ZRECENT.ZCALLTYPE AS 'CALL TYPE',
												ZRECENTSLINE.ZCALLTYPE AS 'CALL TYPE',
												ZRECENTSLINE.ZPHONENUMBER AS 'PHONE NUMBER'
										FROM ZRECENT
										LEFT JOIN ZRECENTSLINE ON ZRECENT.ZRECENTSLINE = ZRECENTSLINE.Z_PK
							) AS RECENT ON ZVIBERMESSAGE.Z_PK = RECENT.ZCALLLOGMESSAGE 
					LEFT JOIN ZVIBERLOCATION ON ZVIBERMESSAGE.ZLOCATION = ZVIBERLOCATION.Z_PK
					LEFT JOIN ZATTACHMENT ON ZVIBERMESSAGE.ZATTACHMENT = ZATTACHMENT.Z_PK

					ORDER BY ZVIBERMESSAGE.Z_PK
								''')
					
				all_rows = cursor.fetchall()
				usageentries = len(all_rows)
				if usageentries > 0:
					data_list =[]
					for row in all_rows:
						temp_list = list(row)
						temp_chats_names = str(temp_list[4])
						temp_list[4] = temp_chats_names + ',' + str(viber_settings['_myUserName'])
						temp_chats_phones = str(temp_list[5])
						temp_list[5] = temp_chats_phones + ',' + str(viber_settings['_myPhoneNumber'])
						if temp_list[13]:
							y = json.loads(temp_list[13], strict=False) # geolocation data lives under ['pa_message_data']['rich_media']['Buttons'][2]['Map']
							#If the 'Map' key path resolves, the lat/lon are assigned to the corresponding columns; otherwise processing continues (ignoring key/index errors).
							temp_list[13] = ''
							try:
								temp_list[18] = y['pa_message_data']['rich_media']['Buttons'][2]['Map']['Latitude']
								temp_list[19] = y['pa_message_data']['rich_media']['Buttons'][2]['Map']['Longitude']
							except (KeyError,IndexError) as e:
								pass

							#This long run of dict lookups extracts only the fields identified as important from the whole metadata dictionary.
							#Only selected fields are pulled so the report stays readable; for the complete picture, inspect the full dictionary in the .db itself.
							#That is why this column is named 'Message Metadata Fragments'.

							#general values
							if "Text" in y:
								try:
									temp_list[13] += "Text: "+ str(y['Text'])+","
								except KeyError:
									pass
							if "Title" in y:
								try:
									temp_list[13] += "Title: "+ str(y['Title'])+","
								except KeyError:
									pass
							if "URL" in y:
								try:
									temp_list[13] += "URL: "+ str(y['URL'])+","
								except KeyError:
									pass
							if "ThumbnailURL" in y:
								try:
									temp_list[13] += "ThumbnailURL: "+ str(y['ThumbnailURL'])+","
								except KeyError:
									pass
							if "Type" in y:
								try:
									temp_list[13] += "Type: "+ str(y['Type'])+","
								except KeyError:
									pass
						
							if "generalFwdInfo" in y:
								try:
									temp_list[13] += "Original Chat ID: "+ str(y['generalFwdInfo']['orig_chat_id'])+","
								except KeyError:
									pass

							if "audio_ptt" in y:
								try:
									temp_list[13] += "Audio Duration: "+ str(y['audio_ptt']['duration'])+","
								except KeyError:
									pass

							#fileInfo values
							if "fileInfo" in y:
								try:
									temp_list[13] += "File Info - Content Type: "+ str(y['fileInfo']['ContentType'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Type: "+ str(y['fileInfo']['Type'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Hash: "+ str(y['fileInfo']['FileHash'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Name: "+ str(y['fileInfo']['FileName'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Extension: "+ str(y['fileInfo']['FileExt'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Duration: "+ str(y['fileInfo']['Duration'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Size: "+ str(y['fileInfo']['FileSize'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File Info - Original Size: "+ str(y['fileInfo']['OrigSize'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - iOS Origin: "+ str(y['fileInfo']['mediaInfo']['ios_origin'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - Width: "+ str(y['fileInfo']['mediaInfo']['Width'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - Height: "+ str(y['fileInfo']['mediaInfo']['Height'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "File|Media Info - Media Type: "+ str(y['fileInfo']['mediaInfo']['MediaType'])+","
								except KeyError:
									pass

							#custom_sticker_info values
							if "custom_sticker_info" in y:
								try:
									temp_list[13] += "Custom Sticker Info - Package ID: "+ str(y['custom_sticker_info']['package_id'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Custom Sticker Info - Sticker ID: "+ str(y['custom_sticker_info']['sticker_id'])+","
								except KeyError:
									pass

							#groupReferralInfo values
							if "groupReferralInfo" in y:
								try:
									temp_list[13] += "Group ID: "+ str(y['groupReferralInfo']['groupID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Group Name: "+ str(y['groupReferralInfo']['groupName'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Invite Link: "+ str(y['groupReferralInfo']['inviteLink'])+","
								except KeyError:
									pass

							#pa_message_data values
							if "pa_message_data" in y:
								try:
									temp_list[13] += "Message Data - Text: "+ str(y['pa_message_data']['text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Message Data - Sender Name: "+ str(y['pa_message_data']['sender']['name'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Message Data - Alt Text: "+ str(y['pa_message_data']['alt_text'])+"," 
								except KeyError:
									pass
								try:
									temp_list[13] += "Message Data - Favorites Metadata - URL: "+ str(y['pa_message_data']['rich_media']['FavoritesMetadata']['url'])+","
								except KeyError:
									pass

							#pin values
							if "pin" in y:
								try:
									temp_list[13] += "Pin - Action: "+ str(y['pin']['action'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Pin - Text: "+ str(y['pin']['text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Pin - Description: "+ str(y['pin']['extended']['descr'])+","
								except KeyError:
									pass

							#poll values
							if "poll" in y:
								try:
									temp_list[13] += "Poll - Group ID: "+ str(y['poll']['groupID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Type: "+ str(y['poll']['type'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Sender ID: "+ str(y['poll']['senderID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Multiple: "+ str(y['poll']['multiple'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Quiz Text: "+ str(y['poll']['quiz_text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Poll - Description: "+ str(y['poll']['extended']['descr'])+","
								except KeyError:
									pass
								try:
									if y['poll']['options']:
										z = ''
										for x in y['poll']['options']:
											try:
												z = x['count']
												temp_list[13] += "Poll - Options - Count: "+ str(z)+","
											except (KeyError,IndexError) as e:
												pass
											try:
												z = x['name']
												temp_list[13] += "Poll - Options - Name: "+ str(z)+","
											except (KeyError,IndexError) as e:
												pass
											try:
												z = x['isLiked']
												temp_list[13] += "Poll - Options - Is Liked: "+ str(z)+","
											except (KeyError,IndexError) as e:
												pass
								except (KeyError,IndexError) as e:
									pass

							#quote values
							if "quote" in y:
								try:
									temp_list[13] += "Quote - Text: "+ str(y['quote']['text'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Name: "+ str(y['quote']['name'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Attachment Name: "+ str(y['quote']['attachmentName'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Attachment UID: "+ str(y['quote']['attachmentUID'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Attachment Preview Path: "+ str(y['quote']['attachmentPreviewPath'])+","
								except KeyError:
									pass
								try:
									temp_list[13] += "Quote - Text Meta Info - Data: "+ y['quote']['textMetaInfo_v2'][0]['data']+","
								except (KeyError,IndexError) as e:
									pass
		
						if temp_list[10] == 'Outgoing':
							temp_list[0] = viber_settings['_myUserName']
							temp_list[1] = ''
							temp_list[2] = viber_settings['_myPhoneNumber']
						
						if row[15] is not None:
							thumb = media_to_html(row[15], files_found, report_folder)
						else:
							thumb = ''
						
						row = tuple(temp_list)
						data_list.append((row[6], row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[14], row[15], thumb, row[8], row[9], row[10], row[11], row[12], row[16], row[17], row[18],row[19], row[20], row[21], row[22], row[23], row[24], row[25], row[26], row[13]))
						

				if usageentries > 0:
					report = ArtifactHtmlReport('Viber - Chats')
					report.start_artifact_report(report_folder, 'Viber - Chats')
					report.add_script()
					data_headers = ('Timestamp', 'Sender (Display Full Name)','Sender (Display Short Name)','Sender (Phone)','Chat Name','Chat Participant(s)','Chat Phone(s)', 'Message Creation Date - UTC','Message Change State Date - UTC','Message Content','Attachment Name', 'Attachment','Call Date - UTC','Call Type','State','Duration (Seconds)','System Type Description','Attachment Type','Attachment Size','Latitude','Longitude','Conversation Deleted','Message Deleted', 'Conversation Time Bomb Duration','Message Time Bomb Duration','Message Time Bomb Timestamp - UTC','Conversation Marked Favorite','Likes Count','Message Metadata Fragments')
					report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Attachment']) #html_escape=False
					report.end_artifact_report()
					
					kmlactivity = 'Viber - Chats'
					kmlgen(report_folder, kmlactivity, data_list, data_headers)

					tsvname = 'Viber - Chats'
					tsv(report_folder, data_headers, data_list, tsvname)
				
				db.close()
			else:
				logfunc('No Viber data found.')
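The metadata walker in this example spends one try/except per nested key path. A minimal sketch of a generic lookup helper that could condense those blocks, under the same KeyError/IndexError assumptions (safe_get is a hypothetical name, not part of this module):

def safe_get(obj, *path, default=None):
    # Walk nested dicts/lists, returning default on any missing key or index.
    for step in path:
        try:
            obj = obj[step]
        except (KeyError, IndexError, TypeError):
            return default
    return obj

# e.g. the shared-location coordinates handled above:
# lat = safe_get(y, 'pa_message_data', 'rich_media', 'Buttons', 2, 'Map', 'Latitude')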
Exemplo n.º 21
0
def get_teams(files_found, report_folder, seeker):
    CacheFile = 0
    databasedata = ''
    nsplist = {}  # stays empty when no CacheFile is recovered
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.sqlite'):
            databasedata = file_found
        if file_found.endswith('CacheFile'):
            CacheFile = file_found

    if CacheFile != 0:
        with open(CacheFile, 'rb') as nsfile:
            nsplist = nd.deserialize_plist(nsfile)

    db = open_sqlite_db_readonly(databasedata)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime('2001-01-01', "ZARRIVALTIME" || ' seconds'),
    ZIMDISPLAYNAME,
    ZCONTENT
    from ZSMESSAGE
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            thumb = ''
            if '<div><img src=' in row[2]:
                matches = re.search('"([^"]+)"', row[2])
                imageURL = (matches[0].strip('\"'))
                if imageURL in nsplist.keys():
                    data_file_real_path = nsplist[imageURL]
                    for match in files_found:
                        if data_file_real_path in match:
                            shutil.copy2(match, report_folder)
                            data_file_name = os.path.basename(match)
                            thumb = f'<img src="{report_folder}/{data_file_name}"></img>'
            data_list.append((row[0], row[1], row[2], thumb))

        description = 'Teams Messages'
        report = ArtifactHtmlReport('Teams Messages')
        report.start_artifact_report(report_folder, 'Teams Messages',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Name', 'Message', 'Shared Media')
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Shared Media'])
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Messages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Microsoft Teams Messages'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Microsoft Teams Messages data available')

    cursor.execute('''
    SELECT
    ZDISPLAYNAME,
    zemail,
    ZPHONENUMBER
    from
    ZDEVICECONTACTHASH
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = 'Teams Contact'
        report = ArtifactHtmlReport('Teams Contact')
        report.start_artifact_report(report_folder, 'Teams Contact',
                                     description)
        report.add_script()
        data_headers = ('Display Name', 'Email', 'Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Teams Contact'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Teams Contact data available')

    cursor.execute('''
    SELECT
    datetime('2001-01-01', "ZTS_LASTSYNCEDAT" || ' seconds'),
    ZDISPLAYNAME,
    ZTELEPHONENUMBER
    from zuser
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = 'Teams User'
        report = ArtifactHtmlReport('Teams User')
        report.start_artifact_report(report_folder, 'Teams User', description)
        report.add_script()
        data_headers = ('Timestamp Last Sync', 'Display Name', 'Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams User'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Microsoft Teams User'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Teams User data available')

    cursor.execute('''
    SELECT
    ZCOMPOSETIME,
    zfrom,
    ZIMDISPLAYNAME,
    zcontent,
    ZPROPERTIES
    from ZSMESSAGE, ZMESSAGEPROPERTIES
    where ZSMESSAGE.ZTSID = ZMESSAGEPROPERTIES.ZTSID
    order by ZCOMPOSETIME
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list_calls = []
    data_list_cards = []
    data_list_unparsed = []

    if usageentries > 0:
        for row in all_rows:
            plist = ''
            composetime = row[0].replace('T', ' ')
            plist_file_object = io.BytesIO(row[4])
            if row[4].find(b'NSKeyedArchiver') == -1:
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(plist_file_object)
                else:
                    plist = biplist.readPlist(plist_file_object)
            else:
                try:
                    plist = nd.deserialize_plist(plist_file_object)
                except (nd.DeserializeError,
                        nd.biplist.NotBinaryPlistException,
                        nd.biplist.InvalidPlistException,
                        nd.plistlib.InvalidFileException,
                        nd.ccl_bplist.BplistError, ValueError, TypeError,
                        OSError, OverflowError) as ex:
                    logfunc(f'Failed to read plist for {row[4]}, error was:' +
                            str(ex))
            if 'call-log' in plist:
                datacalls = json.loads(plist['call-log'])
                callstart = (datacalls.get('startTime'))
                callstart = callstart.replace('T', ' ')
                callconnect = (datacalls.get('connectTime'))
                callconnect = callconnect.replace('T', ' ')
                callend = (datacalls['endTime'])
                callend = callend.replace('T', ' ')
                calldirection = (datacalls['callDirection'])
                calltype = (datacalls['callType'])
                callstate = (datacalls['callState'])
                calloriginator = (datacalls['originator'])
                calltarget = (datacalls['target'])
                calloriginatordname = (
                    datacalls['originatorParticipant']['displayName'])
                callparticipantdname = (
                    datacalls['targetParticipant']['displayName'])
                data_list_calls.append(
                    (composetime, row[1], row[2], row[3], callstart,
                     callconnect, callend, calldirection, calltype, callstate,
                     calloriginator, calltarget, calloriginatordname,
                     callparticipantdname))
            elif 'cards' in plist:
                cards = json.loads(plist['cards'])
                cardurl = (
                    cards[0]['content']['body'][0]['selectAction']['url'])
                cardtitle = (
                    cards[0]['content']['body'][0]['selectAction']['title'])
                cardtext = (cards[0]['content']['body'][1]['text'])
                cardurl2 = (cards[0]['content']['body'][0]['url'])
                # Initialize the location fields so cards without an 'id'
                # payload do not leave them unbound in the append below.
                cardlat = cardlong = cardexpires = carddevid = ''
                if (cards[0]['content']['body'][0].get('id')) is not None:
                    idcontent = json.loads(
                        cards[0]['content']['body'][0]['id'])
                    cardlat = (idcontent.get('latitude'))
                    cardlong = (idcontent.get('longitude'))
                    cardexpires = (idcontent.get('expiresAt'))
                    cardexpires = datetime.datetime.fromtimestamp(cardexpires /
                                                                  1000)
                    carddevid = (idcontent.get('deviceId'))
                data_list_cards.append(
                    (composetime, row[1], row[2], row[3], cardurl, cardtitle,
                     cardtext, cardurl2, cardlat, cardlong, cardexpires,
                     carddevid))
            else:
                data_list_unparsed.append((composetime, row[1], row[2], row[3],
                                           plist))

        description = 'Microsoft Teams Call Logs'
        report = ArtifactHtmlReport('Microsoft Teams Call Logs')
        report.start_artifact_report(report_folder, 'Teams Call Logs',
                                     description)
        report.add_script()
        data_headers = ('Compose Timestamp', 'From', 'Display Name', 'Content',
                        'Call Start', 'Call Connect', 'Call End',
                        'Call Direction', 'Call Type', 'Call State',
                        'Call Originator', 'Call Target',
                        'Call Originator Name', 'Call Participant Name')
        report.write_artifact_data_table(data_headers, data_list_calls,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Call Logs'
        tsv(report_folder, data_headers, data_list_calls, tsvname)

        tlactivity = 'Microsoft Teams Call Logs'
        timeline(report_folder, tlactivity, data_list_calls, data_headers)

        description = 'Microsoft Teams Shared Locations'
        report = ArtifactHtmlReport('Microsoft Teams Shared Locations')
        report.start_artifact_report(report_folder, 'Teams Shared Locations',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'From', 'Display Name', 'Content',
                        'Card URL', 'Card Title', 'Card Text', 'Card URL2',
                        'Latitude', 'Longitude', 'Card Expires', 'Device ID')
        report.write_artifact_data_table(data_headers, data_list_cards,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Microsoft Teams Shared Locations'
        tsv(report_folder, data_headers, data_list_cards, tsvname)

        tlactivity = 'Microsoft Teams Shared Locations'
        timeline(report_folder, tlactivity, data_list_cards, data_headers)

        kmlactivity = 'Microsoft Teams Shared Locations'
        kmlgen(report_folder, kmlactivity, data_list_cards, data_headers)

    else:
        logfunc('No Microsoft Teams Call Logs & Cards data available')

    db.close()
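The ZPROPERTIES blob parsed above may be either a plain plist or an NSKeyedArchiver archive, which is why the code sniffs for the b'NSKeyedArchiver' marker before choosing a parser. A minimal sketch of that dispatch on its own (load_properties_blob is a hypothetical name; nska_deserialize is the library behind the nd alias used in these modules):

import io
import plistlib

import nska_deserialize as nd

def load_properties_blob(blob):
    # NSKeyedArchiver archives embed their archiver class name; plain plists do not.
    if b'NSKeyedArchiver' in blob:
        return nd.deserialize_plist(io.BytesIO(blob))
    return plistlib.load(io.BytesIO(blob))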
Exemplo n.º 22
0
def get_locationDallB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		bundleid,
		altitude,
		horizontalaccuracy,
		verticalaccuracy,
		state,
		age,
		routinemode,
		locationofinteresttype,
		latitude,
		longitude,
		speed,
		course,
		confidence
		from appharvest
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = ''
            report = ArtifactHtmlReport('LocationD App Harvest')
            report.start_artifact_report(report_folder, 'App Harvest',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Bundle ID', 'Altitude',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'State', 'Age', 'Routine Mode',
                            'Location of Interest Type', 'Latitude',
                            'Longitude', 'Speed', 'Course', 'Confidence')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell App Harvest'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell App Harvest'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell App Harvest'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD App Harvest')

    if does_table_exist(db, "cdmacelllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		sid,
		nid,
		bsid,
		zoneid,
		bandclass,
		channel,
		pnoffset,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from cdmacelllocation
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15], row[16]))

            description = ''
            report = ArtifactHtmlReport('LocationD CDMA Location')
            report.start_artifact_report(report_folder, 'CDMA Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'SID', 'NID', 'BSID', 'ZONEID',
                            'BANDCLASS', 'Channel', 'PNOFFSET', 'Altitude',
                            'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD CDMA Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD CDMA Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD CDMA Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD CDMA Location')

    if does_table_exist(db, "celllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		mnc,
		lac,
		ci,
		uarfcn,
		psc,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from celllocation
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13], row[14]))

            description = ''
            report = ArtifactHtmlReport('LocationD Cell Location')
            report.start_artifact_report(report_folder, 'Cell Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'MNC', 'LAC', 'CI', 'UARFCN',
                            'PSC', 'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD Cell Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD Cell Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD Cell Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD Cell Location')

    if does_table_exist(db, "ltecelllocation"):
        cursor = db.cursor()
        cursor.execute("""
		select 
		datetime(timestamp + 978307200,'unixepoch'),
		mcc,
		mnc,
		ci,
		uarfcn,
		pid,
		altitude,
		speed,
		course,
		confidence,
		horizontalaccuracy,
		verticalaccuracy,
		latitude,
		longitude
		from ltecelllocation	
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = ''
            report = ArtifactHtmlReport('LocationD LTE Location')
            report.start_artifact_report(report_folder, 'LTE Location',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'MCC', 'MNC', 'CI', 'UARFCN', 'PID',
                            'Altitude', 'Speed', 'Course', 'Confidence',
                            'Horizontal Accuracy', 'Vertical Accuracy',
                            'Latitude', 'Longitude')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD LTE Location'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD LTE Location'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD LTE Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD LTE Location')

    cursor = db.cursor()
    cursor.execute("""
	select
	datetime(timestamp + 978307200,'unixepoch'),
	mac,
	channel,
	infomask,
	speed,
	course,
	confidence,
	score,
	reach,
	horizontalaccuracy,
	verticalaccuracy,
	latitude,
	longitude
	from wifilocation
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'MAC', 'Channel', 'Infomask', 'Speed',
                        'Course', 'Confidence', 'Score', 'Reach',
                        'Horizontal Accuracy', 'Vertical Accuracy', 'Latitude',
                        'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')
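The cdmacelllocation, celllocation, and ltecelllocation tables are not present in every copy of this database, hence the does_table_exist guards above. That helper is provided by the project; an equivalent check, sketched here for reference, queries sqlite_master directly (table_exists is a hypothetical name):

def table_exists(db, table_name):
    # db is an open sqlite3 connection; sqlite_master lists all schema objects.
    cursor = db.execute(
        "SELECT count(*) FROM sqlite_master WHERE type = 'table' AND name = ?",
        (table_name,))
    return cursor.fetchone()[0] > 0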
Exemplo n.º 23
0
def get_locationDparked(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("12"):
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(zrtvehicleeventmo.zdate + 978307200, 'unixepoch'),
        datetime(zrtvehicleeventmo.zlocdate + 978307200, 'unixepoch'),
        zvehicleidentifier,
        zlocuncertainty,
        zidentifier,
        zlocationquality,
        zusersetlocation,
        zusuallocation,
        znotes,
        zphotodata,
        zloclatitude,
        zloclongitude
        from
        zrtvehicleeventmo
        ''')
    else:
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(zrtvehicleeventmo.zdate + 978307200, 'unixepoch'),
        datetime(zrtvehicleeventmo.zlocdate + 978307200, 'unixepoch'),
        zvehicleidentifier,
        zlocuncertainty,
        zidentifier,
        zlocationquality,
        zusersetlocation,
        zusuallocation,
        znotes,
        zgeomapitem,
        zphotodata,
        zloclatitude,
        zloclongitude
        from
        zrtvehicleeventmo
            ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        
        if version.parse(iOSversion) >= version.parse("12"):
            
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11]))

            report = ArtifactHtmlReport('RoutineD Vehicle Location')
            report.start_artifact_report(report_folder, 'Vehicle Location')
            report.add_script()
            data_headers = ('Timestamp','Location Date','Vehicle Identifier', 'Location Identifier', 'Identifier','Location Quality','User Set Location','Usual Location','Notes','Photo Data','Latitude','Longitude')  
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = 'RoutineD Vehicle Location'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'RoutineD Vehicle Location'
            timeline(report_folder, tlactivity, data_list, data_headers)
            
            kmlactivity = 'RoutineD Vehicle Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12]))
            
            report = ArtifactHtmlReport('RoutineD Vehicle Location')
            report.start_artifact_report(report_folder, 'Vehicle Location')
            report.add_script()
            data_headers = ('Timestamp','Location Date','Vehicle Identifier', 'Location Identifier', 'Identifier','Location Quality','User Set Location','Usual Location','Notes','Geo Map Item','Photo Data','Latitude','Longitude')  
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = 'RoutineD Vehicle Location'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'RoutineD Vehicle Location'
            timeline(report_folder, tlactivity, data_list, data_headers)
            
            kmlactivity = 'RoutineD Vehicle Location'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available in RoutineD Vehicle Location')

    db.close()
    return      
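The version branch above exists because the ZRTVEHICLEEVENTMO schema changed in iOS 12 (only the pre-12 query still selects zgeomapitem). packaging.version compares release segments numerically rather than lexically, which is what makes gates like this safe; a small illustration:

from packaging import version

# "12.4.1" sorts after "12" and "11.4.1" sorts before it, as intended:
assert version.parse("11.4.1") < version.parse("12")
assert version.parse("12.4.1") >= version.parse("12")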
    
     
Exemplo n.º 24
0
def get_icloudPhotoMeta(files_found, report_folder, seeker):
    counter = 0
    os.makedirs(os.path.join(report_folder, "bplists"))
    for file_found in files_found:
        file_found = str(file_found)
        counter = counter + 1
        data_list = []
        with open(file_found, "r") as filecontent:
            for line in filecontent:
                jsonconv = json.loads(line)
                #print(jsonconv)

                if (isinstance(jsonconv, dict)):
                    jsonconv = jsonconv['results']
                length = len(jsonconv)

                for i in range(length):
                    #Initialize variables for when items are not located in a loop.
                    id = ''
                    created_timestamp = ''
                    created_user = ''
                    created_device = ''
                    modified_timestamp = ''
                    modified_user = ''
                    modified_device = ''
                    decoded = ''
                    is_deleted = ''
                    is_expunged = ''
                    org_filesize = ''
                    rec_mod_date = ''
                    import_date = ''
                    f_org_creation_date = ''
                    res_org_filesize = ''
                    added_date = ''
                    timezoneoffse = ''
                    latitude = ''
                    longitude = ''
                    altitude = ''
                    datestamp = ''
                    timestamp = ''
                    vid_name = ''
                    decoded_tz = ''
                    title = ''
                    recordtype = ''
                    tiff = ''
                    exif = ''

                    id = (jsonconv[i].get('id', ''))
                    rowid = str(i)
                    recordtype = (jsonconv[i].get('recordType', ''))

                    if (jsonconv[i].get('created', '')):
                        created_timestamp = (jsonconv[i]['created'].get(
                            'timestamp', ''))
                        created_timestamp = timefactorconversion(
                            str(created_timestamp))
                        #created_user = (jsonconv[i]['created'].get('user', ''))
                        #created_device = (jsonconv[i]['created'].get('device', ''))

                    if (jsonconv[i].get('modified', '')):
                        modified_timestamp = (jsonconv[i]['modified'].get(
                            'timestamp', ''))
                        modified_timestamp = timefactorconversion(
                            str(modified_timestamp))
                        #modified_user = (jsonconv[i]['modified'].get('user', ''))
                        #modified_device = (jsonconv[i]['modified'].get('device', ''))

                    if (jsonconv[i].get('fields')):
                        coded_string = (jsonconv[i]['fields'].get(
                            'filenameEnc', ''))  #base64
                        decoded = base64.b64decode(coded_string)
                        decoded = decoded.decode()
                        coded_string_tz = (jsonconv[i]['fields'].get(
                            'timeZoneNameEnc', ''))  #base64
                        decoded_tz = base64.b64decode(coded_string_tz)
                        decoded_tz = decoded_tz.decode()
                        is_deleted = (jsonconv[i]['fields'].get(
                            'isDeleted', ''))
                        is_expunged = (jsonconv[i]['fields'].get(
                            'isExpunged', ''))
                        org_filesize = (jsonconv[i]['fields'].get(
                            'resOriginalFileSize', ''))
                        res_org_filesize = (jsonconv[i]['fields'].get(
                            'resOriginalFileSize', ''))

                        if (jsonconv[i]['fields'].get('originalCreationDate',
                                                      '')):
                            created_timestamp = (jsonconv[i]['fields'].get(
                                'originalCreationDate', ''))
                            created_timestamp = timefactorconversion(
                                str(created_timestamp))

                        rec_mod_date = (jsonconv[i]['fields'].get(
                            'recordModificationDate', ''))
                        if isinstance(rec_mod_date, int):
                            rec_mod_date = timefactorconversion(
                                str(rec_mod_date))

                        import_date = (jsonconv[i]['fields'].get(
                            'importDate', ''))
                        if isinstance(import_date, int):
                            import_date = timefactorconversion(
                                str(import_date))

                        f_org_creation_date = (jsonconv[i]['fields'].get(
                            'originalCreationDate', ''))
                        if isinstance(f_org_creation_date, int):
                            f_org_creation_date = timefactorconversion(
                                str(f_org_creation_date))

                        added_date = (jsonconv[i]['fields'].get(
                            'addedDate', ''))
                        if isinstance(added_date, int):
                            added_date = timefactorconversion(str(added_date))

                        timezoneoffse = (jsonconv[i]['fields'].get(
                            'timeZoneOffse', ''))
                        title = (jsonconv[i]['fields'].get('title', ''))
                        decodedt = base64.b64decode(title)
                        title = decodedt.decode()

                        coded_bplist = (
                            jsonconv[i]['fields'].get('mediaMetaDataEnc')
                        )  #convert from base64 to bplist and process
                        if coded_bplist is not None:
                            decoded_bplist = base64.b64decode(coded_bplist)
                            # If needed send the full bplist to report folder by editing the outputpath below
                            with open(
                                    os.path.join(report_folder, "bplists",
                                                 rowid + ".bplist"),
                                    'wb') as g:
                                g.write(decoded_bplist)
                            pl = plistlib.loads(decoded_bplist)
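                            # The decoded plist mirrors ImageIO-style photo
                            # metadata: '{TIFF}', '{Exif}' and '{GPS}' keys
                            # hold the camera and location dictionaries.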
                            if (pl.get('{TIFF}')):
                                #print('YESS TIFF # '+str(i))
                                tiff = (pl.get('{TIFF}'))
                                exif = (pl.get('{Exif}'))

                                if (pl.get('{GPS}')) is not None:
                                    #print(pl['{GPS}'])
                                    latitude = (pl['{GPS}'].get('Latitude'))
                                    longitude = (pl['{GPS}'].get('Longitude'))
                                    altitude = (pl['{GPS}'].get('Altitude'))
                                    datestamp = (pl['{GPS}'].get('DateStamp'))
                                    timestamp = (pl['{GPS}'].get('TimeStamp'))
                                    #print(latitude)
                            else:
                                if pl.get('moop'):  # If needed, send binary content to the report folder
                                    pass
                                    #with open(f'{outputpath}/{i}.moop', 'wb') as g:
                                    #    g.write(pl.get('moop'))

                    data_list.append(
                        (created_timestamp, rowid, recordtype, decoded, title,
                         org_filesize, latitude, longitude, altitude,
                         datestamp, timestamp, added_date, timezoneoffse,
                         decoded_tz, is_deleted, is_expunged, import_date,
                         rec_mod_date, res_org_filesize, id, tiff, exif))

        if len(data_list) > 0:
            report = ArtifactHtmlReport('iCloud - Photos Metadata' + ' ' +
                                        str(counter))
            report.start_artifact_report(
                report_folder, 'iCloud - Photos Metadata' + ' ' + str(counter))
            report.add_script()
            data_headers = ('Timestamp', 'Row ID', 'Record Type',
                            'Filename (Decoded)', 'Title', 'Filesize',
                            'Latitude', 'Longitude', 'Altitude',
                            'GPS Datestamp', 'GPS Time', 'Added Date',
                            'Timezone Offset', 'Decoded TZ', 'Is Deleted?',
                            'Is Expunged?', 'Import Date', 'Modification Date',
                            'Res Original Filesize', 'ID', 'TIFF', 'EXIF')
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found,
                                             html_escape=False)
            report.end_artifact_report()

            tsvname = 'iCloud - Photos Metadata'
            tsv(report_folder, data_headers, data_list, tsvname)

            kmlactivity = 'iCloud - Photos Metadata'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available')
Example No. 25
def get_fitbitExercise(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)

        if not file_found.endswith('exercise_db'):
            continue  # Skip all other files

        data_list = []
        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
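        # EXERCISE_EVENT stores one GPS fix per row; TIME is Unix epoch in
        # milliseconds, hence the division by 1000 before datetime().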
        cursor.execute('''
        Select DISTINCT(SESSION_ID)
        from EXERCISE_EVENT
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            for row in all_rows:
                sessionID = row[0]
                # Parameterized query avoids breaking on session IDs that
                # contain quotes.
                cursor.execute('''
                Select
                datetime(TIME/1000,'unixepoch'),
                LABEL,
                LATITUDE,
                LONGITUDE,
                ACCURACY,
                ALTITUDE,
                SPEED,
                PACE,
                SESSION_ID
                from EXERCISE_EVENT
                where SESSION_ID = ?
                ''', (sessionID,))
                all_rows_exercise = cursor.fetchall()
                usageentries_all = len(all_rows_exercise)
                if usageentries_all > 0:
                    data_list_current = []
                    data_headers = ('Timestamp', 'Label', 'Latitude',
                                    'Longitude', 'Accuracy', 'Altitude',
                                    'Speed', 'Pace', 'Session_ID')
                    for row_exercise in all_rows_exercise:
                        data_list.append(
                            (row_exercise[0], row_exercise[1], row_exercise[2],
                             row_exercise[3], row_exercise[4], row_exercise[5],
                             row_exercise[6], row_exercise[7],
                             row_exercise[8]))
                        data_list_current.append(
                            (row_exercise[0], row_exercise[1], row_exercise[2],
                             row_exercise[3], row_exercise[4], row_exercise[5],
                             row_exercise[6], row_exercise[7],
                             row_exercise[8]))

                    kmlactivity = f'Fitbit Map - Session ID {sessionID}'
                    kmlgen(report_folder, kmlactivity, data_list_current,
                           data_headers)

                    data_list_current = []

            report = ArtifactHtmlReport('Fitbit Exercise')
            report.start_artifact_report(report_folder, 'Fitbit Exercise')
            report.add_script()

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Fitbit Exercise'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Fitbit Exercise'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Fitbit - Exercise data available')
Example No. 26
def get_googlePhotos(files_found, report_folder, seeker, wrap_text):

    source_file_photos = ''
    source_file_cache = ''
    source_file_trash = ''
    gphotos_photos_db = ''
    gphotos_cache_db = ''
    gphotos_trash_db = ''

    for file_found in files_found:

        file_name = str(file_found)
        if file_name.lower().endswith('gphotos0.db'):
            gphotos_photos_db = str(file_found)
            source_file_photos = file_found.replace(seeker.directory, '')

        if file_name.lower().endswith('disk_cache'):
            gphotos_cache_db = str(file_found)
            source_file_cache = file_found.replace(seeker.directory, '')

        if file_name.lower().endswith('local_trash.db'):
            gphotos_trash_db = str(file_found)
            source_file_trash = file_found.replace(seeker.directory, '')

    db = open_sqlite_db_readonly(gphotos_photos_db)
    cursor = db.cursor()
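    # Column sets differ across Google Photos versions, so PRAGMA table_info
    # is used to detect the schema before choosing which query to run.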

    columns = [i[1] for i in cursor.execute('PRAGMA table_info(local_media)')]

    if 'purge_timestamp' not in columns:
        cursor.execute('''
        select
        datetime(utc_timestamp/1000, 'unixepoch'),
        filename,
        filepath,
        datetime(capture_timestamp/1000, 'unixepoch'),
        timezone_offset/3600000,
        width,
        height,
        size_bytes,
        case duration
            when 0 then ''
            else strftime('%H:%M:%S', duration/1000, 'unixepoch')
        end,
        latitude,
        longitude,
        folder_name,
        media_store_id,
        case when trash_timestamp is NULL then ''
            else datetime(trash_timestamp/1000, 'unixepoch')
        end
        from local_media
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Google Photos - Local Media')
            report.start_artifact_report(report_folder,
                                         'Google Photos - Local Media')
            report.add_script()
            data_headers = ('Timestamp', 'File Name', 'File Path',
                            'Captured Timestamp (Local)', 'Timezone Offset',
                            'Width', 'Height', 'Size', 'Duration', 'Latitude',
                            'Longitude', 'Folder Name', 'Media Store ID',
                            'Trashed Timestamp')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Google Photos - Local Media'
            tsv(report_folder, data_headers, data_list, tsvname,
                source_file_photos)

            tlactivity = 'Google Photos - Local Media'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'Google Photos - Local Media'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No Google Photos - Local Media data available')

    else:
        cursor.execute('''
        select
        datetime(utc_timestamp/1000, 'unixepoch'),
        filename,
        filepath,
        datetime(capture_timestamp/1000, 'unixepoch'),
        timezone_offset/3600000,
        width,
        height,
        size_bytes,
        case duration
            when 0 then ''
            else strftime('%H:%M:%S', duration/1000, 'unixepoch')
        end,
        latitude,
        longitude,
        folder_name,
        media_store_id,
        case when trash_timestamp is NULL then ''
            else datetime(trash_timestamp/1000, 'unixepoch')
        end,
        case when purge_timestamp is NULL then ''
            else datetime(purge_timestamp/1000, 'unixepoch')
        end
        from local_media
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Google Photos - Local Media')
            report.start_artifact_report(report_folder,
                                         'Google Photos - Local Media')
            report.add_script()
            data_headers = ('Timestamp', 'File Name', 'File Path',
                            'Captured Timestamp (Local)', 'Timezone Offset',
                            'Width', 'Height', 'Size', 'Duration', 'Latitude',
                            'Longitude', 'Folder Name', 'Media Store ID',
                            'Trashed Timestamp', 'Purge Timestamp')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13], row[14]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Google Photos - Local Media'
            tsv(report_folder, data_headers, data_list, tsvname,
                source_file_photos)

            tlactivity = 'Google Photos - Local Media'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'Google Photos - Local Media'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No Google Photos - Local Media data available')

    columns2 = [
        i[1] for i in cursor.execute('PRAGMA table_info(remote_media)')
    ]
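    # Newer remote_media schemas add inferred_latitude/inferred_longitude
    # columns; branch on their presence, as done for local_media above.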

    if 'inferred_latitude' not in columns2:
        cursor.execute('''
        select
        datetime(utc_timestamp/1000, 'unixepoch'),
        filename,
        replace(remote_url,'=s0-d',''),
        datetime(capture_timestamp/1000, 'unixepoch'),
        timezone_offset/3600000,
        strftime('%H:%M:%S', duration/1000, 'unixepoch'),
        latitude,
        longitude,
        upload_status
        from remote_media
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Google Photos - Remote Media')
            report.start_artifact_report(report_folder,
                                         'Google Photos - Remote Media')
            report.add_script()
            data_headers = ('Timestamp', 'File Name', 'Remote URL',
                            'Captured Timestamp (Local)', 'Timezone Offset',
                            'Duration', 'Latitude', 'Longitude',
                            'Upload Status %')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Google Photos - Remote Media'
            tsv(report_folder, data_headers, data_list, tsvname,
                source_file_photos)

            tlactivity = 'Google Photos - Remote Media'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'Google Photos - Remote Media'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No Google Photos - Remote Media data available')

    else:
        cursor.execute('''
        select
        datetime(utc_timestamp/1000, 'unixepoch'),
        filename,
        replace(remote_url,'=s0-d',''),
        datetime(capture_timestamp/1000, 'unixepoch'),
        timezone_offset/3600000,
        strftime('%H:%M:%S', duration/1000, 'unixepoch'),
        latitude,
        longitude,
        inferred_latitude,
        inferred_longitude,
        upload_status
        from remote_media
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Google Photos - Remote Media')
            report.start_artifact_report(report_folder,
                                         'Google Photos - Remote Media')
            report.add_script()
            data_headers = ('Timestamp', 'File Name', 'Remote URL',
                            'Captured Timestamp (Local)', 'Timezone Offset',
                            'Duration', 'Latitude', 'Longitude',
                            'Inferred Latitude', 'Inferred Longitude',
                            'Upload Status %')
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'Google Photos - Remote Media'
            tsv(report_folder, data_headers, data_list, tsvname,
                source_file_photos)

            tlactivity = 'Google Photos - Remote Media'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'Google Photos - Remote Media'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No Google Photos - Remote Media data available')

    cursor.execute('''
    select
    datetime(utc_timestamp/1000, 'unixepoch') as "Timestamp (UTC)",
    filename,
    replace(remote_url,'=s0-d',''),
    size_bytes,
    datetime(capture_timestamp/1000, 'unixepoch') as "Capture Local Timestamp",
    timezone_offset/3600000,
    upload_status
    from shared_media
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Google Photos - Shared Media')
        report.start_artifact_report(report_folder,
                                     'Google Photos - Shared Media')
        report.add_script()
        data_headers = ('Timestamp', 'File Name', 'Remote URL', 'Size',
                        'Captured Timestamp (Local)', 'Timezone Offset',
                        'Upload Status %')
        data_list = []
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Google Photos - Shared Media'
        tsv(report_folder, data_headers, data_list, tsvname,
            source_file_photos)

        tlactivity = 'Google Photos - Shared Media'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Google Photos - Shared Media data available')

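    # rtrim(filepath, replace(filepath, '/', '')) strips every trailing
    # non-slash character, leaving the parent directory of each file.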
    cursor.execute('''
    select 
    DISTINCT(local_media.bucket_id),
    local_media.folder_name,
    rtrim(local_media.filepath, replace(local_media.filepath, '/', ''))
    from local_media, backup_folders
    where local_media.bucket_id = backup_folders.bucket_id
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Google Photos - Backed Up Folders')
        report.start_artifact_report(report_folder,
                                     'Google Photos - Backed Up Folders')
        report.add_script()
        data_headers = (
            'Bucket ID',
            'Backed Up Folder Name',
            'Backed Up Folder Path',
        )
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Google Photos - Backed Up Folders'
        tsv(report_folder, data_headers, data_list, tsvname,
            source_file_photos)

        tlactivity = 'Google Photos - Backed Up Folders'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Google Photos - Backed Up Folders data available')

    db.close()

    db = open_sqlite_db_readonly(gphotos_cache_db)
    cursor = db.cursor()
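    # The disk_cache journal table indexes cached media; each key is matched
    # against the carved files and a copy is placed in the report folder.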

    cursor.execute('''
    select
    datetime(last_modified_time/1000, 'unixepoch'),
    key,
    size,
    case pending_delete
        when 0 then ''
        when 1 then 'Yes'
    end
    from journal
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:
            fileNameKey = row[1]

            for match in files_found:
                if fileNameKey in match:
                    mimetype = magic.from_file(match, mime=True)
                    ext = (mimetype.split('/')[1])
                    newname = os.path.join(report_folder,
                                           f'{fileNameKey}.{ext}')
                    shutil.copy2(match, newname)

            thumb = media_to_html(fileNameKey, files_found, report_folder)

            data_list.append((row[0], row[1], thumb, row[2], row[3]))

        report = ArtifactHtmlReport('Google Photos - Cache')
        report.start_artifact_report(report_folder, 'Google Photos - Cache')
        report.add_script()
        data_headers = ('Timestamp', 'Key', 'Image', 'Size',
                        'Pending Deletion')

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Image'])
        report.end_artifact_report()

        tsvname = 'Google Photos - Cache'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_cache)

        tlactivity = 'Google Photos - Cache'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Google Photos - Cache data available')

    db.close()

    db = open_sqlite_db_readonly(gphotos_trash_db)
    cursor = db.cursor()

    cursor.execute('''
    select
    datetime(deleted_time/1000, 'unixepoch'),
    local_path,
    content_uri,
    trash_file_name,
    case is_video
        when 0 then ''
        when 1 then 'Yes'
    end,
    media_store_id
    from local
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []

    if usageentries > 0:
        for row in all_rows:

            fileNameKey = row[3]
            thumb = ''

            for match in files_found:
                if fileNameKey in match:
                    mimetype = magic.from_file(match, mime=True)
                    ext = (mimetype.split('/')[1])
                    newname = os.path.join(report_folder,
                                           f'{fileNameKey}.{ext}')
                    shutil.copy2(match, newname)

            thumb = media_to_html(fileNameKey, files_found, report_folder)

            data_list.append(
                (row[0], row[1], row[2], row[3], thumb, row[4], row[5]))

        report = ArtifactHtmlReport('Google Photos - Local Trash')
        report.start_artifact_report(report_folder,
                                     'Google Photos - Local Trash')
        report.add_script()
        data_headers = ('Timestamp', 'Original Path', 'Content URI',
                        'File Name', 'Image', 'Is Video', 'Media Store ID')

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Image'])
        report.end_artifact_report()

        tsvname = 'Google Photos - Local Trash'
        tsv(report_folder, data_headers, data_list, tsvname, source_file_trash)

        tlactivity = 'Google Photos - Local Trash'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Google Photos - Local Trash data available')

    db.close()
    return
Example No. 27
def get_routineDLocationsLocal(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc(
            "Unsupported version for RoutineD Locations Local.sqlite on iOS " +
            iOSversion)
    else:
        for file_found in files_found:
            file_found = str(file_found)

            if file_found.endswith('Local.sqlite'):
                break

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
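        # Core Data stores dates as seconds since 2001-01-01; adding
        # 978307200 rebases them to the Unix epoch for datetime().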
        if version.parse(iOSversion) >= version.parse("14"):  # Tested 14.1
            cursor.execute('''
            select
            datetime(zrtlearnedlocationofinterestvisitmo.zentrydate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinterestvisitmo.zexitdate + 978307200, 'unixepoch'),
            (zrtlearnedlocationofinterestvisitmo.zexitdate-zrtlearnedlocationofinterestvisitmo.zentrydate)/60.00,
            zrtlearnedlocationofinterestmo.zlocationlatitude,
            zrtlearnedlocationofinterestmo.zlocationlongitude,
            zrtlearnedlocationofinterestvisitmo.zconfidence,
            zrtlearnedlocationofinterestvisitmo.zlocationverticaluncertainty,
            zrtlearnedlocationofinterestvisitmo.zlocationhorizontaluncertainty,
            zrtlearnedlocationofinterestvisitmo.zdatapointcount,
            datetime(zrtlearnedlocationofinterestvisitmo.zcreationdate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinterestvisitmo.zexpirationdate + 978307200, 'unixepoch'),
            zrtlearnedlocationofinterestvisitmo.zlocationlatitude,
            zrtlearnedlocationofinterestvisitmo.zlocationlongitude 
            from
            zrtlearnedlocationofinterestvisitmo,
            zrtlearnedlocationofinterestmo 
            where zrtlearnedlocationofinterestmo.z_pk = zrtlearnedlocationofinterestvisitmo.zlocationofinterest
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            data_list = []
            if usageentries > 0:
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8], row[9],
                                      row[10], row[11], row[12]))

                description = 'Significant Locations - Location of Interest Entry (Historical)'
                report = ArtifactHtmlReport('Locations')
                report.start_artifact_report(report_folder,
                                             'RoutineD Locations Entry',
                                             description)
                report.add_script()
                data_headers = ('Timestamp', 'Exit', 'Entry Time (Minutes)',
                                'Latitude', 'Longitude', 'Confidence',
                                'Location Vertical Uncertainty',
                                'Location Horizontal Uncertainty',
                                'Data Point Count', 'Place Creation Date',
                                'Expiration', 'Visit Latitude',
                                'Visit Longitude')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'RoutineD Locations Entry'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'RoutineD Locations Entry'
                timeline(report_folder, tlactivity, data_list, data_headers)

                kmlactivity = 'RoutineD Locations Entry'
                kmlgen(report_folder, kmlactivity, data_list, data_headers)

            else:
                logfunc(
                    'No RoutineD Significant Locations Entry data available')

        else:  # < ios 14
            cursor.execute('''
            select
            datetime(zrtlearnedlocationofinterestvisitmo.zentrydate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinterestvisitmo.zexitdate + 978307200, 'unixepoch'),
            (zrtlearnedlocationofinterestvisitmo.zexitdate-zrtlearnedlocationofinterestvisitmo.zentrydate)/60.00,
            zrtlearnedlocationofinterestmo.zlocationlatitude,
            zrtlearnedlocationofinterestmo.zlocationlongitude,
            zrtlearnedlocationofinterestvisitmo.zconfidence,
            zrtlearnedlocationofinterestvisitmo.zlocationuncertainty,
            zrtlearnedlocationofinterestvisitmo.zdatapointcount,
            datetime(zrtlearnedlocationofinterestvisitmo.zcreationdate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinterestvisitmo.zexpirationdate + 978307200, 'unixepoch'),
            zrtlearnedlocationofinterestvisitmo.zlocationlatitude,
            zrtlearnedlocationofinterestvisitmo.zlocationlongitude
            from
            zrtlearnedlocationofinterestvisitmo,
            zrtlearnedlocationofinterestmo 
            where zrtlearnedlocationofinterestmo.z_pk = zrtlearnedlocationofinterestvisitmo.zlocationofinterest
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            data_list = []
            if usageentries > 0:
                for row in all_rows:
                    data_list.append(
                        (row[0], row[1], row[2], row[3], row[4], row[5],
                         row[6], row[7], row[8], row[9], row[10], row[11]))

                description = 'Significant Locations - Location of Interest Entry (Historical)'
                report = ArtifactHtmlReport('Locations')
                report.start_artifact_report(report_folder,
                                             'RoutineD Locations Entry',
                                             description)
                report.add_script()
                data_headers = ('Timestamp', 'Exit', 'Entry Time (Minutes)',
                                'Latitude', 'Longitude', 'Confidence',
                                'Location Uncertainty', 'Data Point Count',
                                'Place Creation Date', 'Expiration',
                                'Visit Latitude', 'Visit Longitude')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'RoutineD Locations Entry'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'RoutineD Locations Entry'
                timeline(report_folder, tlactivity, data_list, data_headers)

                kmlactivity = 'RoutineD Locations Entry'
                kmlgen(report_folder, kmlactivity, data_list, data_headers)

            else:
                logfunc(
                    'No RoutineD Significant Locations Entry data available')

        if version.parse(iOSversion) >= version.parse("12"):
            cursor.execute('''
            select
            datetime(zrtlearnedlocationofinteresttransitionmo.zstartdate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinteresttransitionmo.zstopdate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinteresttransitionmo.zcreationdate + 978307200, 'unixepoch'),
            datetime(zrtlearnedlocationofinteresttransitionmo.zexpirationdate + 978307200, 'unixepoch'),
            zrtlearnedlocationofinterestmo.zlocationlatitude,
            zrtlearnedlocationofinterestmo.zlocationlongitude
            from
            zrtlearnedlocationofinteresttransitionmo 
            left join zrtlearnedlocationofinterestmo 
            on zrtlearnedlocationofinterestmo.z_pk = zrtlearnedlocationofinteresttransitionmo.zlocationofinterest
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            data_list = []
            if usageentries > 0:
                for row in all_rows:
                    data_list.append(
                        (row[0], row[1], row[2], row[3], row[4], row[5]))

                description = 'Significant Locations - Location of Interest Transition (Historical)'
                report = ArtifactHtmlReport('Locations')
                report.start_artifact_report(report_folder,
                                             'RoutineD Transition',
                                             description)
                report.add_script()
                data_headers = ('Start', 'Stop', 'Creation Date', 'Expiration',
                                'Latitude', 'Longitude')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'RoutineD Transition'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'RoutineD Transition'
                timeline(report_folder, tlactivity, data_list, data_headers)

            else:
                logfunc(
                    'No RoutineD Significant Locations Transition data available'
                )
Example No. 28
def get_routineDCloud(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    for file_found in files_found:
      file_found = str(file_found)
      
      if file_found.endswith('Cloud-V2.sqlite'):
        break
        
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
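    # zrtaddressmo rows hold the address components (country, locality,
    # thoroughfare, ...) that routined caches for learned places.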
    if version.parse(iOSversion) >= version.parse("12"):
        cursor.execute('''
        select 
        datetime(zrtaddressmo.zcreationdate + 978307200, 'unixepoch'),
        datetime(zrtaddressmo.zexpirationdate + 978307200, 'unixepoch'),
        zrtaddressmo.zcountry,
        zrtaddressmo.zcountrycode,
        zrtaddressmo.zpostalcode,
        zrtaddressmo.zlocality,
        zrtaddressmo.zsublocality,
        zrtaddressmo.zthoroughfare,
        zrtaddressmo.zsubthoroughfare,
        zrtaddressmo.zsubadministrativearea,  
        zrtaddressmo.zareasofinterest
        from zrtaddressmo
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []    
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))
            
            description = 'Address'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder, 'RoutineD Cloud Addresses', description)
            report.add_script()
            data_headers = ('Address Creation Date','Address Expiration Date','Country','Country Code','Postal Code', 'Locality','Sublocality','Thoroughfare','Subthoroughfare','Subadministrative Area','Area of Interest')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = 'RoutineD Cloud Addresses'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = 'RoutineD Cloud Addresses'
            timeline(report_folder, tlactivity, data_list, data_headers)
            

        else:
            logfunc('No RoutineD Cloud Addresses data available')
            
    if version.parse(iOSversion) >= version.parse("12"):
      cursor.execute('''
      select
      datetime(zrtmapitemmo.zcreationdate + 978307200, 'unixepoch'),
      datetime(zrtmapitemmo.zexpirationdate + 978307200, 'unixepoch'),
      zrtmapitemmo.zname,
      zrtaddressmo.zcountry,
      zrtaddressmo.zcountrycode,
      zrtaddressmo.zpostalcode,
      zrtaddressmo.zlocality,
      zrtaddressmo.zsublocality,
      zrtaddressmo.zthoroughfare,
      zrtaddressmo.zsubthoroughfare,
      zrtaddressmo.zsubadministrativearea,  
      zrtaddressmo.zareasofinterest,
      zrtmapitemmo.zlatitude,
      zrtmapitemmo.zlongitude,
      zrtmapitemmo.zuncertainty
      from zrtmapitemmo, zrtaddressmo 
      where  zrtmapitemmo.z_pk == zrtaddressmo.zmapitem
      ''')
      all_rows = cursor.fetchall()
      usageentries = len(all_rows)
      data_list = []  
      if usageentries > 0:
          for row in all_rows:
              data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14]))
          
          description = 'RoutineD Cloud Map Items'
          report = ArtifactHtmlReport('Locations')
          report.start_artifact_report(report_folder, 'RoutineD Cloud Map Items', description)
          report.add_script()
          data_headers = ('Timestamp','Map Item Expiration Date','Map Item Name','Country','Country Code','Postal Code', 'Locality','Sublocality','Thoroughfare','Subthoroughfare','Subadministrative Area','Area of Interest', 'Latitude','Longitude','Uncertainty')
          report.write_artifact_data_table(data_headers, data_list, file_found)
          report.end_artifact_report()
          
          tsvname = 'RoutineD Cloud Map Items'
          tsv(report_folder, data_headers, data_list, tsvname)
          
          tlactivity = 'RoutineD Cloud Map Items'
          timeline(report_folder, tlactivity, data_list, data_headers)
          
          kmlactivity = 'RoutineD Cloud Map Items'
          kmlgen(report_folder, kmlactivity, data_list, data_headers)

      else:
          logfunc('No RoutineD Cloud Map Items data available')
    
        
    if (version.parse(iOSversion) >= version.parse("13")):
      cursor.execute('''
      select
      datetime(zrtlearnedvisitmo.zentrydate + 978307200, 'unixepoch'),
      datetime(zrtlearnedvisitmo.zexitdate + 978307200, 'unixepoch'),
      (zexitdate-zentrydate)/60.00,
      zrtlearnedvisitmo.zplace,
      zrtlearnedvisitmo.zdatapointcount,
      zrtaddressmo.zcountry,
      zrtaddressmo.zcountrycode,
      zrtaddressmo.zpostalcode,
      zrtaddressmo.zlocality,
      zrtaddressmo.zsublocality,
      zrtaddressmo.zthoroughfare,
      zrtaddressmo.zsubthoroughfare,
      zrtaddressmo.zsubadministrativearea,  
      zrtaddressmo.zareasofinterest,
      zrtlearnedvisitmo.zlocationuncertainty,
      zrtlearnedvisitmo.zconfidence,
      datetime(zrtlearnedvisitmo.zcreationdate + 978307200, 'unixepoch'),
      datetime(zrtlearnedvisitmo.zexpirationdate + 978307200, 'unixepoch'),
      zrtdevicemo.zdeviceclass,
      zrtdevicemo.zdevicemodel,
      zrtdevicemo.zdevicename,
      datetime(zrtlearnedplacemo.zcreationdate + 978307200, 'unixepoch'),
      datetime(zrtlearnedplacemo.zexpirationdate + 978307200, 'unixepoch'),
      datetime(zrtaddressmo.zcreationdate + 978307200, 'unixepoch'),   
      zrtlearnedvisitmo.zlocationlatitude,
      zrtlearnedvisitmo.zlocationlongitude,
      datetime(zrtmapitemmo.zcreationdate + 978307200, 'unixepoch'),
      datetime(zrtmapitemmo.zexpirationdate + 978307200, 'unixepoch'),
      zrtmapitemmo.zlatitude,
      zrtmapitemmo.zlongitude,
      zrtmapitemmo.zuncertainty,
      zrtmapitemmo.zdisplaylanguage,
      zrtmapitemmo.zname 
      from
      zrtlearnedvisitmo, zrtdevicemo, zrtlearnedplacemo,  zrtaddressmo, zrtmapitemmo
      where zrtlearnedvisitmo.zdevice = zrtdevicemo.z_pk 
      and zrtlearnedplacemo.z_pk = zrtlearnedvisitmo.zplace
      and zrtaddressmo.zmapitem = zrtlearnedplacemo.zmapitem
      and zrtmapitemmo.z_pk = zrtlearnedplacemo.zmapitem
            
      ''')

      all_rows = cursor.fetchall()
      usageentries = len(all_rows)
      data_list = []    
      if usageentries > 0:
          for row in all_rows:
              data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17], row[18], row[19], row[20], row[21], row[22], row[23], row[24], row[25], row[26], row[27], row[28], row[29], row[30], row[31], row[32]))
          
          description = 'Significant Locations - Visit Entry & Exit (Historical)'
          report = ArtifactHtmlReport('Locations')
          report.start_artifact_report(report_folder, 'RoutineD Cloud Visit Entry Exit', description)
          report.add_script()
          data_headers = ('Timestamp','Visit Exit','Visit Time (Minutes)','Place ID','Data Point Count','Country','Country Code','Postal Code', 'Locality','Sublocality','Thoroughfare','Subthoroughfare','Subadministrative Area','Area of Interest', 'Location Uncertainty', 'Confidence','Visit Creation','Visit Expiration','Device Class','Device Model','Device Name','Learned Place Creation', 'Learned Place Expiration','Address Creation', 'Latitude','Longitude','Map Item Creation Date','Map Item Expiration Date','Map Item Latitude','Map Item Longitude','Uncertainty','Map Item Language','Map Item Name')
          report.write_artifact_data_table(data_headers, data_list, file_found)
          report.end_artifact_report()
          
          tsvname = 'RoutineD Cloud Visit Entry Exit'
          tsv(report_folder, data_headers, data_list, tsvname)
          
          tlactivity = 'RoutineD Cloud Visit Entry Exit'
          timeline(report_folder, tlactivity, data_list, data_headers)
          
          kmlactivity = 'RoutineD Cloud Visit Entry Exit'
          kmlgen(report_folder, kmlactivity, data_list, data_headers)

      else:
          logfunc('No RoutineD Significant Locations - Visit Entry & Exit (Historical) data available')
          
          
Example No. 29
def get_routineDLocationsLocal(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("12"):
        logfunc(
            "Unsupported version for RoutineD Locations Local.sqlite on iOS " +
            iOSversion)
    else:
        for file_found in files_found:
            file_found = str(file_found)

            if file_found.endswith('Local.sqlite'):
                break

        db = sqlite3.connect(file_found)
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_local_learned_location_of_interest_entry.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
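        # Visits (ZRTLEARNEDLOCATIONOFINTERESTVISITMO) join to their parent
        # place of interest (ZRTLEARNEDLOCATIONOFINTERESTMO) via ZLOCATIONOFINTEREST.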
        cursor.execute('''
        SELECT
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE + 978307200, 'UNIXEPOCH') AS "ENTRY",
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE + 978307200, 'UNIXEPOCH') AS "EXIT",
            (ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE-ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE)/60.00 AS "ENTRY TIME (MINUTES)",
            ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
            ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCONFIDENCE AS "CONFIDENCE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONUNCERTAINTY AS "LOCATION UNCERTAINTY",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZDATAPOINTCOUNT AS "DATA POINT COUNT",
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "PLACE CREATION DATE",
            DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLATITUDE AS "VISIT LATITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLONGITUDE AS "VISIT LONGITUDE",
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTVISITMO TABLE ID" 
        FROM
            ZRTLEARNEDLOCATIONOFINTERESTVISITMO 
            LEFT JOIN
                ZRTLEARNEDLOCATIONOFINTERESTMO 
                ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONOFINTEREST
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = 'Significant Locations - Location of Interest Entry (Historical)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD Locations Entry',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Exit', 'Entry Time (Minutes)',
                            'Coordinates', 'Latitude', 'Longitude',
                            'Confidence', 'Location Uncertainty',
                            'Data Point Count', 'Place Creation Date',
                            'Expiration', 'Visit Latitude', 'Visit Longitude',
                            'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Locations Entry'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Locations Entry'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'RoutineD Locations Entry'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Significant Locations Entry data available')

        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/routined_local_learned_location_of_interest_entry.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE + 978307200, 'UNIXEPOCH') AS "ENTRY",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE + 978307200, 'UNIXEPOCH') AS "EXIT",
               (ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXITDATE-ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZENTRYDATE)/60.00 AS "EXIT TIME (MINUTES)",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCONFIDENCE AS "CONFIDENCE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONUNCERTAINTY AS "LOCATION UNCERTAINTY",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZDATAPOINTCOUNT AS "DATA POINT COUNT",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "PLACE CREATION DATE",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLATITUDE AS "VISIT LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONLONGITUDE AS "VISIT LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTVISITMO TABLE ID" 
            FROM
               ZRTLEARNEDLOCATIONOFINTERESTVISITMO 
               LEFT JOIN
                  ZRTLEARNEDLOCATIONOFINTERESTMO 
                  ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTVISITMO.ZLOCATIONOFINTEREST
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], row[8], row[9],
                                  row[10], row[11], row[12], row[13]))

            description = 'Significant Locations - Location of Interest Exit (Historical)'
            report = ArtifactHtmlReport('Locations')
            report.start_artifact_report(report_folder,
                                         'RoutineD Locations Exit',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Exit', 'Exit Time (Minutes)',
                            'Coordinates', 'Latitude', 'Longitude',
                            'Confidence', 'Location Uncertainty',
                            'Data Point Count', 'Place Creation Date',
                            'Expiration', 'Visit Latitude', 'Visit Longitude',
                            'Table ID')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'RoutineD Locations Exit'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'RoutineD Locations Exit'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No RoutineD Significant Locations Exit data available')

        if version.parse(iOSversion) >= version.parse("12"):
            # The following SQL query is adapted from Sarah Edward's APOLLO project
            # (https://github.com/mac4n6/APOLLO), used under terms of its license found under Licenses/apollo.LICENSE.txt
            cursor.execute('''
          SELECT
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE + 978307200, 'UNIXEPOCH') AS "START",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE + 978307200, 'UNIXEPOCH') AS "STOP",
               (ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE-ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE)/60.00 AS "TRANSITION TIME (MINUTES)",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "CREATION DATE",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO TABLE ID"
            FROM
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO
               LEFT JOIN
                  ZRTLEARNEDLOCATIONOFINTERESTMO
                  ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZLOCATIONOFINTEREST
          ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            data_list = []
            if usageentries > 0:
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                description = 'Significant Locations - Location of Interest Transition Start (Historical)'
                report = ArtifactHtmlReport('Locations')
                report.start_artifact_report(report_folder,
                                             'RoutineD Transition Start',
                                             description)
                report.add_script()
                data_headers = ('Timestamp', 'Stop',
                                'Transition Time (Minutes)', 'Coordinates',
                                'Creation Date', 'Expiration', 'Latitude',
                                'Longitude', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'RoutineD Transition Start'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'RoutineD Transition Start'
                timeline(report_folder, tlactivity, data_list, data_headers)

            else:
                logfunc(
                    'No RoutineD Significant Locations Transition Start data available'
                )

        if (version.parse(iOSversion) >= version.parse("11")) and (
                version.parse(iOSversion) < version.parse("12")):
            # The following SQL query is adapted from Sarah Edward's APOLLO project
            # (https://github.com/mac4n6/APOLLO), used under terms of its license found under Licenses/apollo.LICENSE.txt
            cursor.execute('''
          SELECT
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE + 978307200, 'UNIXEPOCH') AS "START",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE + 978307200, 'UNIXEPOCH') AS "STOP",
               (ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTOPDATE-ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZSTARTDATE)/60.00 AS "TRANSITION TIME (MINUTES)",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE || ", " || ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "COORDINATES",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZCREATIONDATE + 978307200, 'UNIXEPOCH') AS "CREATION DATE",
               DATETIME(ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZEXPIRATIONDATE + 978307200, 'UNIXEPOCH') AS "EXPIRATION",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLATITUDE AS "LATITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTMO.ZLOCATIONLONGITUDE AS "LONGITUDE",
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.Z_PK AS "ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO TABLE ID" 
            FROM
               ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO 
               LEFT JOIN
                  ZRTLEARNEDLOCATIONOFINTERESTMO 
                  ON ZRTLEARNEDLOCATIONOFINTERESTMO.Z_PK = ZRTLEARNEDLOCATIONOFINTERESTTRANSITIONMO.ZLOCATIONOFINTEREST
          ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            data_list = []
            if usageentries > 0:
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                description = 'Significant Locations - Location of Interest Transition Stop (Historical)'
                report = ArtifactHtmlReport('Locations')
                report.start_artifact_report(report_folder,
                                             'RoutineD Transition Stop',
                                             description)
                report.add_script()
                data_headers = ('Timestamp', 'Stop',
                                'Transition Time (Minutes)', 'Coordinates',
                                'Creation Date', 'Expiration', 'Latitude',
                                'Longitude', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'RoutineD Transition Stop'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'RoutineD Transition Stop'
                timeline(report_folder, tlactivity, data_list, data_headers)

            else:
                logfunc(
                    'No RoutineD Significant Locations Transition Stop data available'
                )
Example No. 30
def get_locationDallB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("11"):
        logfunc("Unsupported version for LocationD App Harvest on iOS " +
                iOSversion)
    else:
        logfunc(iOSversion)
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_appharvest.txt
        # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute("""
		SELECT
		DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
		BUNDLEID AS "BUNDLE ID",
		LATITUDE || ", " || LONGITUDE AS "COORDINATES",
		ALTITUDE AS "ALTITUDE",
		HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
		VERTICALACCURACY AS "VERTICAL ACCURACY",
		STATE AS "STATE",
		AGE AS "AGE",
		ROUTINEMODE AS "ROUTINE MODE",
		LOCATIONOFINTERESTTYPE AS "LOCATION OF INTEREST TYPE",
		HEX(SIG) AS "SIG (HEX)",
		LATITUDE AS "LATITUDE",
		LONGITUDE AS "LONGITUDE",
		SPEED AS "SPEED",
		COURSE AS "COURSE",
		CONFIDENCE AS "CONFIDENCE"
		FROM APPHARVEST
		""")

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                     row[7], row[8], row[9], row[10], row[11], row[12],
                     row[13], row[14], row[15]))

            description = ''
            report = ArtifactHtmlReport('LocationD App Harvest')
            report.start_artifact_report(report_folder, 'App Harvest',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Bundle ID', 'Coordinates',
                            'Altitude', 'Horizontal Accuracy',
                            'Vertical Accuracy', 'State', 'Age',
                            'Routine Mode', 'Location of Interest Type',
                            'Sig (HEX)', 'Latitude', 'Longitude', 'Speed',
                            'Course', 'Confidence')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = 'LocationD App Harvest'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'LocationD App Harvest'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'LocationD App Harvest'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No data available for LocationD App Harvest')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_cdmacelllocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
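    # The CDMA, cell, LTE and Wi-Fi tables below are queried regardless of the
    # iOS version gate above. CDMA identifiers: SID (system ID), NID (network
    # ID), BSID (base station ID); PNOFFSET is the sector's PN code offset.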
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	SID AS "SID",
	NID AS "NID",
	BSID AS "BSID",
	ZONEID AS "ZONEID",
	BANDCLASS AS "BANDCLASS",
	CHANNEL AS "CHANNEL",
	PNOFFSET AS "PNOFFSET",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CDMACELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13],
                 row[14], row[15], row[16], row[17]))

        description = ''
        report = ArtifactHtmlReport('LocationD CDMA Location')
        report.start_artifact_report(report_folder, 'CDMA Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'SID', 'NID',
                        'BSID', 'Zone ID', 'Band Class', 'Channel', 'PN Offset',
                        'Altitude', 'Speed', 'Course', 'Confidence',
                        'Horizontal Accuracy', 'Vertical Accuracy', 'Latitude',
                        'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD CDMA Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD CDMA Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD CDMA Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD CDMA Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_celllocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
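    # GSM/UMTS identifiers: MCC (mobile country code), MNC (mobile network
    # code), LAC (location area code) and CI (cell identity); UARFCN is the
    # UMTS radio channel number and PSC the primary scrambling code.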
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP", 
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	LAC AS "LAC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PSC AS "PSC",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM CELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14], row[15]))

        description = ''
        report = ArtifactHtmlReport('LocationD Cell Location')
        report.start_artifact_report(report_folder, 'Cell Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'LAC', 'CI',
                        'UARFCN', 'PSC', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD Cell Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD Cell Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD Cell Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD Cell Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_ltecelllocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
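    # LTE identifiers: CI is the cell identity; PID here is most likely the
    # LTE physical cell ID.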
    cursor.execute("""
	SELECT 
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PID AS "PID",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM LTECELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        description = ''
        report = ArtifactHtmlReport('LocationD LTE Location')
        report.start_artifact_report(report_folder, 'LTE Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'CI',
                        'UARFCN', 'PID', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD LTE Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD LTE Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD LTE Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD LTE Location')

    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_wifilocation.txt
    # from Sarah Edwards' APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
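    # Wi-Fi observations are keyed by access point MAC address; INFOMASK is a
    # bitmask whose individual flag meanings are not documented in this module.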
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MAC AS "MAC",
	CHANNEL AS "CHANNEL",
	INFOMASK AS "INFOMASK",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	SCORE AS "SCORE",
	REACH AS "REACH",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MAC', 'Channel',
                        'Infomask', 'Speed', 'Course', 'Confidence', 'Score',
                        'Reach', 'Horizontal Accuracy', 'Vertical Accuracy',
                        'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')
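
As a closing illustration, a minimal sketch of running one of these
APOLLO-derived queries outside the reporting framework; the database filename
below is hypothetical and would normally be supplied through files_found:

import sqlite3

db = sqlite3.connect('cache_encryptedB.db')  # hypothetical path, illustration only
cursor = db.cursor()
cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")
for timestamp, latitude, longitude in cursor.fetchall():
    print(timestamp, latitude, longitude)
db.close()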