Example #1
def mark_item_active(data, itemname):
    '''Finds itemname in data, then marks that node as active. Returns the changed data.'''
    pos = data.find(f'" href="{itemname}"')
    if pos < 0:
        logfunc(f'Error, could not find {itemname} in {data}')
        return data
    else:
        ret = data[0:pos] + " active" + data[pos:]
        return ret
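A quick usage sketch, assuming the function above (logfunc is stubbed with print for the demo, and the nav markup is invented): the helper inserts " active" just before the closing quote of the class attribute that precedes the matching href.

logfunc = print  # demo stand-in for the project's logger

nav = '<a class="nav-link" href="Chrome History.html">Chrome History</a>'
print(mark_item_active(nav, 'Chrome History.html'))
# <a class="nav-link active" href="Chrome History.html">Chrome History</a>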
Example #2
def get_chromeTopSites(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found) == 'Top Sites':  # skip -journal and other files
            continue
        browser_name = get_browser_name(file_found)
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , likely duplicate data

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        try:
            cursor.execute('''
            select
            url,
            url_rank,
            title,
            redirects
            FROM
            top_sites ORDER by url_rank asc
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except Exception:
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} Top Sites')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder,
                                       f'{browser_name} Top Sites.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('URL', 'Rank', 'Title', 'Redirects')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} top sites'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc(f'No {browser_name} Top Sites data available')

        db.close()
Example #3
def get_locationDlteloc(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT 
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MCC AS "MCC",
	MNC AS "MNC",
	CI AS "CI",
	UARFCN AS "UARFCN",
	PID AS "PID",
	ALTITUDE AS "ALTITUDE",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM LTECELLLOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7], row[8], row[9], row[10], row[11],
                              row[12], row[13], row[14]))

        description = ''
        report = ArtifactHtmlReport('LocationD LTE Location')
        report.start_artifact_report(report_folder, 'LTE Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MCC', 'MNC', 'CI',
                        'UARFCN', 'PID', 'Altitude', 'Speed', 'Course',
                        'Confidence', 'Horizontal Accuracy',
                        'Vertical Accuracy', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD LTE Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD LTE Location'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD LTE Location')

    db.close()
    return
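The recurring + 978307200 in these queries is the offset between the Unix epoch (1970-01-01) and Apple's Core Data / Cocoa epoch (2001-01-01). A minimal standalone sketch of the same conversion in Python:

from datetime import datetime, timezone

COCOA_EPOCH_OFFSET = 978307200  # seconds from 1970-01-01 to 2001-01-01 UTC

def cocoa_to_datetime(cocoa_seconds):
    # Interpret a Core Data timestamp as an aware UTC datetime.
    return datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET, tz=timezone.utc)

print(cocoa_to_datetime(0))  # 2001-01-01 00:00:00+00:00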
Example #4
def get_knowCactivitylvl(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        DATETIME(ZOBJECT.ZSTARTDATE+978307200,'UNIXEPOCH') AS "START", 
        DATETIME(ZOBJECT.ZENDDATE+978307200,'UNIXEPOCH') AS "END",
        ZOBJECT.ZVALUEDOUBLE AS "VALUE",
        (ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE) AS "USAGE IN SECONDS",
        (ZOBJECT.ZENDDATE - ZOBJECT.ZSTARTDATE)/60.00 AS "USAGE IN MINUTES", 
        CASE ZOBJECT.ZSTARTDAYOFWEEK 
            WHEN "1" THEN "Sunday"
            WHEN "2" THEN "Monday"
            WHEN "3" THEN "Tuesday"
            WHEN "4" THEN "Wednesday"
            WHEN "5" THEN "Thursday"
            WHEN "6" THEN "Friday"
            WHEN "7" THEN "Saturday"
        END "DAY OF WEEK",
        ZOBJECT.ZSECONDSFROMGMT/3600 AS "GMT OFFSET",
        DATETIME(ZOBJECT.ZCREATIONDATE+978307200,'UNIXEPOCH') AS "ENTRY CREATION",
        ZOBJECT.ZUUID AS "UUID",  
        ZOBJECT.Z_PK AS "ZOBJECT TABLE ID" 
    FROM
        ZOBJECT 
        LEFT JOIN
            ZSTRUCTUREDMETADATA 
            ON ZOBJECT.ZSTRUCTUREDMETADATA = ZSTRUCTUREDMETADATA.Z_PK 
        LEFT JOIN
            ZSOURCE 
            ON ZOBJECT.ZSOURCE = ZSOURCE.Z_PK 
    WHERE
        ZSTREAMNAME = "/activity/level" 
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9]))

        report = ArtifactHtmlReport('KnowledgeC Activity Level')
        report.start_artifact_report(report_folder, 'Activity Level')
        report.add_script()
        data_headers = ('Start','End','Value','Usage in Seconds','Usage in Minutes','Day of Week','GMT Offset','Entry Creation','UUID','ZOBJECT Table ID' )   
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'KnowledgeC Activity Level'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No data available in table')

    db.close()
    return      
Example #5
def get_safariWebsearch(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()

    cursor.execute("""
	select
	datetime(history_visits.visit_time+978307200,'unixepoch') ,
	history_items.url,
	history_items.visit_count,
	history_visits.title,
	case history_visits.origin
	when 1 then "icloud synced"
	when 0 then "visited local device"
	else history_visits.origin
	end "icloud sync",
	history_visits.load_successful,
	history_visits.id,
	history_visits.redirect_source,
	history_visits.redirect_destination
	from history_items, history_visits 
	where history_items.id = history_visits.history_item
	and history_items.url like '%search?q=%'
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            search = row[1].split('search?q=')[1].split('&')[0]
            search = search.replace('+', ' ')
            data_list.append((row[0], search, row[1], row[2], row[3], row[4],
                              row[5], row[6], row[7], row[8]))

        description = ''
        report = ArtifactHtmlReport('Safari Browser')
        report.start_artifact_report(report_folder, 'Search Terms',
                                     description)
        report.add_script()
        data_headers = ('Visit Time', 'Search Term', 'URL', 'Visit Count',
                        'Title', 'iCloud Sync', 'Load Successful', 'Visit ID',
                        'Redirect Source', 'Redirect Destination')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Safari Web Search'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Safari Web Search'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in table')

    db.close()
    return
Example #6
def get_googlemapaudio(files_found, report_folder, seeker, wrap_text):

    files_found = list(filter(lambda x: "sbin" not in x, files_found))

    data_headers = ("Timestamp", "Filename", "Audio", "Size")
    audio_info = []
    source_dir = ""

    pattern = r"-?\d+_\d+"

    for file_found in files_found:

        name = Path(file_found).name

        match = fullmatch(pattern, name)
        file_size = getsize(file_found)
        has_data = file_size > 0

        if match and has_data:

            # Timestamp
            timestamp = Path(file_found).name.split("_")[1]
            timestamp_datetime = datetime.fromtimestamp(int(timestamp) / 1000)
            timestamp_str = timestamp_datetime.isoformat(timespec="seconds",
                                                         sep=" ")

            # Audio
            audio = media_to_html(name, files_found, report_folder)

            # Size
            file_size_kb = f"{round(file_size / 1024, 2)} kb"

            # Artefacts
            info = (timestamp_str, name, audio, file_size_kb)
            audio_info.append(info)

    if audio_info:

        source_dir = str(Path(files_found[0]).parent)

        report = ArtifactHtmlReport('Google Maps Voice Guidance')
        report.start_artifact_report(report_folder,
                                     'Google Maps Voice Guidance')
        report.add_script()

        report.write_artifact_data_table(data_headers,
                                         audio_info,
                                         source_dir,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = f'Google Map Audio'
        tsv(report_folder, data_headers, audio_info, tsvname, source_dir)

    else:
        logfunc('No Google Audio Locations found')
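The cache file names matched by the -?\d+_\d+ pattern carry a millisecond Unix timestamp after the underscore. A standalone sketch of the same extraction (the sample file name is invented):

from datetime import datetime
from re import fullmatch

name = "-1234567890_1609459200000"  # hypothetical voice-guidance cache file name
if fullmatch(r"-?\d+_\d+", name):
    millis = int(name.split("_")[1])
    timestamp = datetime.fromtimestamp(millis / 1000)  # local time, as in the parser above
    print(timestamp.isoformat(sep=" ", timespec="seconds"))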
Example #7
def get_quicksearch(files_found, report_folder, seeker):
    sessions = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(
                file_found):  # skip folders (there shouldn't be any)
            continue

        file_name = os.path.basename(file_found)
        with open(file_found, 'rb') as f:
            pb = f.read()
            values, types = blackboxprotobuf.decode_message(pb)
            file_last_mod_date = str(ReadUnixTime(
                os.path.getmtime(file_found)))
            s = parse_session_data(values, file_name, file_last_mod_date,
                                   report_folder)
            sessions.append(s)

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)
    entries = len(sessions)
    if entries > 0:
        description = "Recently searched terms from the Google Search widget and any interaction with the Google Personal Assistant / app (previously known as 'Google Now') appear here."
        report = ArtifactHtmlReport('Google App & Quick Search queries')
        report.start_artifact_report(report_folder,
                                     'Searches & Personal assistant',
                                     description)
        report.add_script()
        data_headers = ('File Timestamp', 'Type', 'Queries', 'Response',
                        'Source File')
        data_list = []
        for s in sessions:
            response = ''
            if s.mp3_path:
                filename = os.path.basename(s.mp3_path)
                response = f'<audio controls><source src="{folder_name}/{filename}"></audio>'
            data_list.append((s.file_last_mod_date, s.session_type,
                              escape(', '.join(s.session_queries)), response,
                              s.source_file))

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         '',
                                         html_escape=False,
                                         write_location=False)
        report.end_artifact_report()

        tsvname = f'google quick search box'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No recent quick search or now data available')
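The session files parsed here are raw protobuf with no published schema, which is why blackboxprotobuf is used: it decodes a message into plain Python values plus a guessed type definition. A minimal round-trip sketch (the field number, type, and value are invented; on decode the field type is guessed, so the string may come back as bytes):

import blackboxprotobuf

typedef = {'1': {'type': 'string'}}
pb = blackboxprotobuf.encode_message({'1': 'hello'}, typedef)
values, types = blackboxprotobuf.decode_message(pb)
print(values, types)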
Example #8
def get_deviceActivator(files_found, report_folder, seeker):
    data_list = []
    alllines = ''
    file_found = str(files_found[0])

    with open(file_found, 'r') as f_in:
        for line in f_in:
            line = line.strip()
            alllines = alllines + line

    found = re.findall(
        '<key>ActivationInfoXML</key><data>(.*)</data><key>RKCertification</key><data>',
        alllines)
    base64_message = found[0]

    data = base64.b64decode(base64_message)

    outpath = os.path.join(report_folder, "results.xml")
    with open(outpath, 'wb') as f_out:
        f_out.write(data)

    xmlfile = outpath
    tree = ET.parse(xmlfile)
    root = tree.getroot()

    for elem in root:
        for elemx in elem:
            for elemz in elemx:
                data_list.append(str(elemz.text).strip())

    it = iter(data_list)
    results = list(zip(it, it))

    for x in results:
        if x[0] == 'EthernetMacAddress':
            logdevinfo(f"Ethernet Mac Address: {x[1]}")
        if x[0] == 'BluetoothAddress':
            logdevinfo(f"Bluetooth Address: {x[1]}")
        if x[0] == 'WifiAddress':
            logdevinfo(f"Wifi Address: {x[1]}")
        if x[0] == 'ModelNumber':
            logdevinfo(f"Model Number: {x[1]}")

    if len(results) > 0:
        report = ArtifactHtmlReport('iOS Device Activator Data')
        report.start_artifact_report(report_folder,
                                     'iOS Device Activator Data')
        report.add_script()
        data_headers = ('Key', 'Values')
        report.write_artifact_data_table(data_headers, results, file_found)
        report.end_artifact_report()

        tsvname = 'iOS Device Activator Data'
        tsv(report_folder, data_headers, results, tsvname)
    else:
        logfunc('No iOS Device Activator Data')
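The it = iter(data_list); list(zip(it, it)) idiom above pairs consecutive elements, turning the flat key/value sequence pulled from the XML into (key, value) tuples. A small illustration with invented values:

flat = ['EthernetMacAddress', 'aa:bb:cc:dd:ee:ff', 'ModelNumber', 'MN123']
it = iter(flat)
print(list(zip(it, it)))
# [('EthernetMacAddress', 'aa:bb:cc:dd:ee:ff'), ('ModelNumber', 'MN123')]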
Example #9
def get_powerlogAudio(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedC_stepcounthistory.txt
        # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            DATETIME(TIMESTAMPLOGGED, 'UNIXEPOCH') AS "TIMESTAMP LOGGED",
            APPLICATIONNAME AS "APPLICATION NAME / BUNDLE ID",
            ASSERTIONID AS "ASSERTION ID",
            ASSERTIONNAME AS "ASSERTION NAME",
            AUDIOROUTE AS "AUDIO ROUTE",
            MIRRORINGSTATE AS "MIRRORING STATE",
            OPERATION,
            PID,
            ID AS "PLAUDIOAGENT_EVENTPOINT_AUDIOAPP TABLE ID" 
            FROM
            PLAUDIOAGENT_EVENTPOINT_AUDIOAPP
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8], row[9]))

                report = ArtifactHtmlReport('Powerlog Audio Routing via App')
                report.start_artifact_report(report_folder, 'Audio Routing')
                report.add_script()
                data_headers = ('Timestamp', 'Timestamp Logged',
                                'Bundle ID', 'Assertion ID', 'Assertion Name',
                                'Audio Route', 'Mirroring State', 'Operation',
                                'PID', 'Audio App Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog Audio Routing App'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'Powerlog Audio Routing App'
                timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No data available in Powerlog Audio Routing')

        db.close()
        return
Example #10
def get_airGuard(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)

    cursor = db.cursor()
    cursor.execute('''
    SELECT
    device.lastSeen AS "Last Time Device Seen",
    beacon.receivedAt AS "Time (Local)",
    beacon.deviceAddress AS "Device MAC Address",
    beacon.latitude AS "Latitude",
    beacon.longitude AS "Longitude",
    beacon.rssi AS "Signal Strength (RSSI)",
    device.firstDiscovery AS "First Time Device Seen",
    device.lastNotificationSent as "Last Time User Notified"
    FROM
    beacon
    LEFT JOIN device on device.address=beacon.deviceAddress
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('AirGuard AirTag Tracker')
        report.start_artifact_report(report_folder, 'AirGuard AirTag Tracker')
        report.add_script()
        data_headers = ('Last Time Device Seen', 'Time (Local)',
                        'Device MAC Address', 'Latitude', 'Longitude',
                        'Signal Strength (RSSI)', 'First Time Device Seen',
                        'Last Time User Notified')
        data_headers_kml = ('Timestamp', 'Time (Local)', 'Device MAC Address',
                            'Latitude', 'Longitude', 'Signal Strength (RSSI)',
                            'First Time Device Seen',
                            'Last Time User Notified')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'AirGuard AirTag Tracker'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'AirGuard AirTag Tracker'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'AirGuard AirTag Tracker'
        kmlgen(report_folder, kmlactivity, data_list, data_headers_kml)

    else:
        logfunc('No AirGuard AirTag Tracker data available')

    db.close()
Example #11
def get_wellbeing(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('app_usage'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT 
                events._id, 
                datetime(events.timestamp /1000, 'UNIXEPOCH') as timestamps, 
                packages.package_name,
                events.type,
                case
                    when events.type = 1 THEN 'ACTIVITY_RESUMED'
                    when events.type = 2 THEN 'ACTIVITY_PAUSED'
                    when events.type = 12 THEN 'NOTIFICATION'
                    when events.type = 18 THEN 'KEYGUARD_HIDDEN (Device Unlock)'
                    when events.type = 19 THEN 'FOREGROUND_SERVICE_START'
                    when events.type = 20 THEN 'FOREGROUND_SERVICE_STOP' 
                    when events.type = 23 THEN 'ACTIVITY_STOPPED'
                    when events.type = 26 THEN 'DEVICE_SHUTDOWN'
                    when events.type = 27 THEN 'DEVICE_STARTUP'
                    else events.type
                    END as eventtype
                FROM
                events INNER JOIN packages ON events.package_id=packages._id 
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Wellbeing events')
            report.start_artifact_report(report_folder, 'Events')
            report.add_script()
            data_headers = ('Timestamp', 'Package Name', 'Event Type')
            data_list = []
            for row in all_rows:
                data_list.append((row[1], row[2], row[4]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'wellbeing - events'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Wellbeing - Events'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Wellbeing event data available')

        db.close()
        return
Example #12
def get_locationDwifilocB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MAC AS "MAC",
	CHANNEL AS "CHANNEL",
	INFOMASK AS "INFOMASK",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	SCORE AS "SCORE",
	REACH AS "REACH",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MAC', 'Channel',
                        'Infomask', 'Speed', 'Course', 'Confidence', 'Score',
                        'Reach', 'Horizontal Accuracy', 'Vertical Accuracy',
                        'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')

    db.close()
    return
Example #13
def get_cashApp(files_found, report_folder, seeker, wrap_text):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('.db'):
            db = open_sqlite_db_readonly(file_found)
            cursor = db.cursor()
            cursor.execute('''Select 
        payment.role,
        payment.sender_id,
        CASE WHEN customer.cashtag IS NULL THEN '***NO CASH TAG PRESENT***' ELSE customer.cashtag END,
        customer.customer_display_name,
        payment.recipient_id,
        CASE WHEN customer1.cashtag IS NULL THEN '***NO CASH TAG PRESENT***' ELSE customer1.cashtag END,
        customer1.customer_display_name,
        payment.state,
        datetime(payment.display_date / 1000.0, 'unixepoch'),
        CASE WHEN json_extract (payment.render_data, '$."note"') IS NULL THEN '***NO NOTE SUBMITTED***' ELSE json_extract (payment.render_data, '$."note"') END,
        printf("$%.2f", json_extract(payment.render_data, '$."amount"."amount"') / 100.0)
    From payment
        Inner Join customer On customer.customer_id = payment.sender_id
        Inner Join customer customer1 On payment.recipient_id = customer1.customer_id
    
    ORDER BY payment.display_date DESC
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Transactions')
        report.start_artifact_report(report_folder, 'Transactions')
        report.add_script()
        data_headers = (
            'Transaction Date', 'User Account Role', 'Sender Display Name',
            'Sender Unique ID', 'Sender Cashtag', 'Recipient Display Name',
            'Recipient Unique ID', 'Recipient Cashtag', 'Transaction Amount',
            'Transaction Status', 'Note'
        )
        data_list = []
        for row in all_rows:
            data_list.append((row[8], row[0], row[3], row[1], row[2], row[6],
                              row[4], row[5], row[10], row[7], row[9]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Cash App Transactions'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Cash App Transactions'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Cash App Transactions data available')

    db.close()
    return
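Cash App keeps the note and the amount (in cents) inside the render_data JSON blob, hence the json_extract/printf combination in the query. A standalone check of that formatting, assuming a SQLite build with the JSON1 functions (the JSON sample is invented):

import sqlite3

con = sqlite3.connect(':memory:')
row = con.execute(
    """SELECT printf("$%.2f", json_extract(?, '$."amount"."amount"') / 100.0)""",
    ('{"amount": {"amount": 1234}}',)
).fetchone()
print(row[0])  # $12.34
con.close()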
Example #14
def get_safariWebsearch(files_found, report_folder, seeker):
	file_found = str(files_found[0])
	db = sqlite3.connect(file_found)
	cursor = db.cursor()

	cursor.execute(
	"""
	SELECT
		DATETIME(HISTORY_VISITS.VISIT_TIME+978307200,'UNIXEPOCH') AS "VISIT TIME",
		HISTORY_ITEMS.URL AS "URL",
		HISTORY_ITEMS.VISIT_COUNT AS "VISIT COUNT",
		HISTORY_VISITS.TITLE AS "TITLE",
		CASE HISTORY_VISITS.ORIGIN
			WHEN 1 THEN "ICLOUD SYNCED DEVICE"
			WHEN 0 THEN "VISITED FROM THIS DEVICE"
			ELSE HISTORY_VISITS.ORIGIN
		END "ICLOUD SYNC",
		HISTORY_VISITS.LOAD_SUCCESSFUL AS "LOAD SUCCESSFUL",
		HISTORY_VISITS.id AS "VISIT ID",
		HISTORY_VISITS.REDIRECT_SOURCE AS "REDIRECT SOURCE",
		HISTORY_VISITS.REDIRECT_DESTINATION AS "REDIRECT DESTINATION",
		HISTORY_VISITS.ID AS "HISTORY ITEM ID"
	FROM HISTORY_ITEMS
	LEFT OUTER JOIN HISTORY_VISITS ON HISTORY_ITEMS.ID == HISTORY_VISITS.HISTORY_ITEM
	WHERE HISTORY_ITEMS.URL like '%search?q=%'
	"""
	)

	all_rows = cursor.fetchall()
	usageentries = len(all_rows)
	data_list = []    
	if usageentries > 0:
		for row in all_rows:
			search = row[1].split('search?q=')[1].split('&')[0]
			search = search.replace('+', ' ')
			data_list.append((row[0], search, row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9]))
	
		description = ''
		report = ArtifactHtmlReport('Safari Browser')
		report.start_artifact_report(report_folder, 'Search Terms', description)
		report.add_script()
		data_headers = ('Visit Time','Search Term','URL','Visit Count','Title','iCloud Sync','Load Successful','Visit ID','Redirect Source','Redirect Destination','History Item ID' )     
		report.write_artifact_data_table(data_headers, data_list, file_found)
		report.end_artifact_report()
		
		tsvname = 'Safari Web Search'
		tsv(report_folder, data_headers, data_list, tsvname)
		
		tlactivity = 'Safari Web Search'
		timeline(report_folder, tlactivity, data_list, data_headers)
	else:
		logfunc('No data available in table')
	
	db.close()
	return 
	
Example #15
def get_chrome(files_found, report_folder, seeker):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found) == 'History':  # skip -journal and other files
            continue
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , likely duplicate data
        browser_name = 'Chrome'
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'

        db = sqlite3.connect(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
            url,
            title,
            visit_count,
            datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch"),
            hidden
        from urls  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} History')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder,
                                       f'{browser_name} History.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('URL', 'Title', 'Visit Count', 'Last Visit Time',
                            'Hidden')
            data_list = []
            for row in all_rows:
                data_list.append(
                    (textwrap.fill(row[0],
                                   width=100), row[1], row[2], row[3], row[4]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} History'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc(f'No {browser_name} history data available')

        db.close()
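Chrome's last_visit_time is microseconds since 1601-01-01 UTC (the WebKit epoch); the strftime('%s','1601-01-01') arithmetic in the query rebases it onto Unix time. The same conversion as a standalone Python sketch:

from datetime import datetime, timedelta, timezone

WEBKIT_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)

def webkit_to_datetime(webkit_microseconds):
    # Chrome stores timestamps as microseconds since 1601-01-01 UTC.
    return WEBKIT_EPOCH + timedelta(microseconds=webkit_microseconds)

print(webkit_to_datetime(11_644_473_600_000_000))  # 1970-01-01 00:00:00+00:00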
Example #16
def get_powerlogAppinfo(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)

    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) >= version.parse("9"):
        cursor = db.cursor()
        cursor.execute('''
        SELECT
            DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
            APPNAME AS "APP NAME",
            APPEXECUTABLE AS "APP EXECUTABLE NAME",
            APPBUNDLEID AS "BUNDLE ID",
            APPBUILDVERSION AS "APP BUILD VERSION",
            APPBUNDLEVERSION AS "APP BUNDLE VERSION",
            APPTYPE AS "APP TYPE",
            CASE APPDELETEDDATE 
                WHEN 0 THEN "NOT DELETED" 
                ELSE DATETIME(APPDELETEDDATE, 'UNIXEPOCH') 
            END "APP DELETED DATE",
            ID AS "PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS TABLE ID" 
        FROM
            PLAPPLICATIONAGENT_EVENTNONE_ALLAPPS
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            if version.parse(iOSversion) >= version.parse("9"):
                for row in all_rows:
                    data_list.append((row[0], row[1], row[2], row[3], row[4],
                                      row[5], row[6], row[7], row[8]))

                report = ArtifactHtmlReport('Powerlog App Info')
                report.start_artifact_report(report_folder, 'App Info')
                report.add_script()
                data_headers = ('Timestamp', 'App Name', 'App Executable Name',
                                'Bundle ID', 'App Build Version',
                                'App Bundle Version', 'App Type',
                                'App Deleted Date', 'Table ID')
                report.write_artifact_data_table(data_headers, data_list,
                                                 file_found)
                report.end_artifact_report()

                tsvname = 'Powerlog App Info'
                tsv(report_folder, data_headers, data_list, tsvname)

                tlactivity = 'Powerlog App Info'
                timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No data available in Powerlog App Info')

        db.close()
        return
Example #17
def _parse_friends(friends_count, rows, report_folder, db_file_name):
    logfunc(f'{friends_count} friends found')

    data_headers = ('Username', 'User ID', 'Display Name', 'Phone Nr',
                    'Birthday', 'Added Timestamp')
    data_list = [(row[0], row[1], row[2], row[3], row[4], row[5])
                 for row in rows]

    _make_reports(f'{APP_NAME} - Friends', data_headers, data_list,
                  report_folder, db_file_name)
Example #18
def ReadUnixTime(unix_time):  # Unix timestamps count seconds from the epoch, 1970-01-01
    '''Returns datetime object, or empty string upon error'''
    if unix_time not in ( 0, None, ''):
        try:
            if isinstance(unix_time, str):
                unix_time = float(unix_time)
            return datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=unix_time)
        except (ValueError, OverflowError, TypeError) as ex:
            logfunc("ReadUnixTime() Failed to convert timestamp from value " + str(unix_time) + " Error was: " + str(ex))
    return ''
Example #19
def get_googlemaplocation(files_found, report_folder, seeker, wrap_text):

    source_file = ''

    for file_found in files_found:
        file_found = str(file_found)

        if 'journal' in file_found:
            source_file = file_found.replace(seeker.directory, '')
            continue

        source_file = file_found.replace(seeker.directory, '')

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        try:
            cursor.execute('''
            SELECT time/1000, dest_lat, dest_lng, dest_title, dest_address, 
                   source_lat, source_lng FROM destination_history;
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except Exception:
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Google Map Locations')
            report.start_artifact_report(report_folder, 'Google Map Locations')
            report.add_script()
            data_headers = (
                'timestamp', 'destination_latitude', 'destination_longitude',
                'destination_title', 'destination_address', 'source_latitude',
                'source_longitude'
            )
            data_list = []
            for row in all_rows:
                timestamp = datetime.datetime.fromtimestamp(int(
                    row[0])).strftime('%Y-%m-%d %H:%M:%S')
                data_list.append(
                    (timestamp, convertGeo(str(row[1])),
                     convertGeo(str(row[2])), row[3], row[4],
                     convertGeo(str(row[5])), convertGeo(str(row[6]))))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Google Map Locations'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)

        else:
            logfunc('No Google Map Locations found')

        db.close()
Example #20
def get_shareit(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('history.db'):
            break

    source_file = file_found.replace(seeker.directory, '')

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    try:
        cursor.execute('''
        SELECT case history_type when 1 then "Incoming" else "Outgoing" end direction,
               case history_type when 1 then device_id else null end from_id,
               case history_type when 1 then null else device_id end to_id,
               device_name, description, timestamp/1000 as timestamp, file_path
                                FROM history
                                JOIN item where history.content_id = item.item_id
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
    except Exception:
        usageentries = 0

    if usageentries > 0:
        report = ArtifactHtmlReport('Shareit file transfer')
        report.start_artifact_report(report_folder, 'shareit file transfer')
        report.add_script()
        data_headers = (
            'direction', 'from_id', 'to_id', 'device_name', 'description',
            'timestamp', 'file_path'
        )
        data_list = []
        for row in all_rows:
            timestamp = datetime.datetime.fromtimestamp(int(
                row[5])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], timestamp, row[6]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Shareit file transfer'
        tsv(report_folder, data_headers, data_list, tsvname, source_file)

        tlactivity = f'Shareit file transfer'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Shareit file transfer data available')

    db.close()
    return
Example #21
def get_firefoxDownloads(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found
        ) == 'mozac_downloads_database':  # skip -journal and other files
            continue

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(created_at/1000,'unixepoch') AS CreatedDate,
        file_name AS FileName,
        url AS URL,
        content_type AS MimeType,
        content_length AS FileSize,
        CASE status
            WHEN 3 THEN 'Paused'
            WHEN 4 THEN 'Canceled'
            WHEN 5 THEN 'Failed'
            WHEN 6 THEN 'Finished'
        END AS Status,
        destination_directory AS DestDir
        FROM downloads
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Firefox - Downloads')
            report.start_artifact_report(report_folder, 'Firefox - Downloads')
            report.add_script()
            data_headers = ('Created Timestamp', 'File Name', 'URL',
                            'MIME Type', 'File Size (Bytes)', 'Status',
                            'Destination Directory')
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Firefox - Downloads'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Firefox - Downloads'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Downloads data available')

        db.close()
Example #22
def get_locationDparkedhistorical(files_found, report_folder, seeker):
    iOSversion = scripts.artifacts.artGlobals.versionf
    if version.parse(iOSversion) < version.parse("11"):
        logfunc("Unsupported version for RoutineD Parked Historical " +
                iOSversion)
        return

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
		   DATETIME(ZRTVEHICLEEVENTHISTORYMO.ZDATE + 978307200, 'UNIXEPOCH') AS "DATE",
		   DATETIME(ZRTVEHICLEEVENTHISTORYMO.ZLOCDATE + 978307200, 'UNIXEPOCH') AS "LOCATION DATE",
		   ZLOCLATITUDE || ", " || ZLOCLONGITUDE AS "COORDINATES",
		   ZLOCUNCERTAINTY AS "LOCATION UNCERTAINTY",
		   ZIDENTIFIER AS "IDENTIFIER",
		   ZLOCLATITUDE AS "LATITUDE",
		   ZLOCLONGITUDE AS "LONGITUDE",
		   ZRTVEHICLEEVENTHISTORYMO.Z_PK AS "ZRTLEARNEDVISITMO TABLE ID" 
		FROM
		   ZRTVEHICLEEVENTHISTORYMO
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5],
                              row[6], row[7]))

        description = ''
        report = ArtifactHtmlReport('RoutineD Parked Vehicle Historical')
        report.start_artifact_report(report_folder,
                                     'Parked Vehicle Historical', description)
        report.add_script()
        data_headers = ('Date', 'Location Date', 'Coordinates',
                        'Location Uncertainty', 'Identifier', 'Latitude',
                        'Longitude', 'Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'RoutineD Parked Vehicle Historical'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'RoutineD Parked Vehicle Historical'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data available in Routine Parked Vehicle Historical')

    db.close()
    return
Example #23
def get_Turbo(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
		case timestamp_millis
			when 0 then ''
			else datetime(timestamp_millis/1000,'unixepoch')
		End as D_T,
		battery_level,
		case charge_type
			when 0 then ''
			when 1 then 'Charging Rapidly'
			when 2 then 'Charging Slowly'
			when 3 then 'Charging Wirelessly'
		End as C_Type,
		case battery_saver
			when 2 then ''
			when 1 then 'Enabled'
		End as B_Saver,
		timezone
	from battery_event
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Turbo')
        report.start_artifact_report(report_folder, 'Turbo')
        report.add_script()
        data_headers = (
            'Date/Time', 'Battery %', 'Charge Type', 'Battery Saver',
            'Timezone'
        )
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Turbo'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Turbo'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Turbo data available')

    db.close()
    return
Example #24
def get_addressBook(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT 
    ABPerson.ROWID,
    c16Phone,
    FIRST,
    MIDDLE,
    LAST,
    c17Email,
    DATETIME(CREATIONDATE+978307200,'UNIXEPOCH'),
    DATETIME(MODIFICATIONDATE+978307200,'UNIXEPOCH'),
    NAME
    FROM ABPerson
    LEFT OUTER JOIN ABStore ON ABPerson.STOREID = ABStore.ROWID
    LEFT OUTER JOIN ABPersonFullTextSearch_content on ABPerson.ROWID = ABPersonFullTextSearch_content.ROWID
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            if row[1] is not None:
                numbers = row[1].split(" +")
                number = numbers[1].split(" ")
                phone_number = "+{}".format(number[0])
            else:
                phone_number = ''

            data_list.append((row[0], phone_number, row[2], row[3], row[4],
                              row[5], row[6], row[7], row[8]))

        report = ArtifactHtmlReport('Address Book Contacts')
        report.start_artifact_report(report_folder, 'Address Book Contacts')
        report.add_script()
        data_headers = ('Contact ID', 'Contact Number', 'First Name',
                        'Middle Name', 'Last Name', 'Email Address',
                        'Creation Date', 'Modification Date', 'Storage Place')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Address Book'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Address Book'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Address Book data available')

    db.close()
    return
Example #25
def get_tileAppDb(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)
        
        if file_found.endswith('tile-TileNetworkDB.sqlite'):
            break
            
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(ZTIMESTAMP,'unixepoch','31 years'),
    ZNAME,
    datetime(ZACTIVATION_TIMESTAMP,'unixepoch','31 years'),
    datetime(ZREGISTRATION_TIMESTAMP,'unixepoch','31 years'),
    ZALTITUDE, 
    ZLATITUDE, 
    ZLONGITUDE,
    ZID,
    ZNODE_TYPE, 
    ZSTATUS,
    ZIS_LOST,
    datetime(ZLAST_LOST_TILE_COMMUNITY_CONNECTION,'unixepoch','31 years')
    FROM ZTILENTITY_NODE INNER JOIN ZTILENTITY_TILESTATE ON ZTILENTITY_NODE.ZTILE_STATE = ZTILENTITY_TILESTATE.Z_PK
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []    
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11]))

        description = ''
        report = ArtifactHtmlReport('Tile App - Tile Information & Geolocation')
        report.start_artifact_report(report_folder, 'Tile App DB Info & Geolocation', description)
        report.add_script()
        data_headers = ('Timestamp','Tile Name','Activation Timestamp','Registration Timestamp','Altitude','Latitude','Longitude','Tile ID','Tile Type','Status','Is Lost?','Last Community Connection')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Tile App DB Info Geolocation'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Tile App DB Info Geolocation'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'Tile App DB Info Geolocation'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No Tile App DB data available')

    db.close()
    return 
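The 'unixepoch','31 years' modifier chain is another way of handling Apple's 2001-based epoch: SQLite first reads the raw value as Unix seconds, then shifts the result forward 31 years (1970 to 2001). A standalone check against an in-memory database:

import sqlite3

con = sqlite3.connect(':memory:')
# A Core Data timestamp of 0 should render as the Cocoa epoch, 2001-01-01.
print(con.execute("SELECT datetime(0,'unixepoch','31 years')").fetchone()[0])
# 2001-01-01 00:00:00
con.close()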
Example #26
def get_wellbeingURLs(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('app_usage'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT 
        datetime(component_events.timestamp/1000, "UNIXEPOCH") as timestamp,
        component_events._id,
        components.package_id, 
        packages.package_name, 
        components.component_name as website,
        CASE
        when component_events.type=1 THEN 'ACTIVITY_RESUMED'
        when component_events.type=2 THEN 'ACTIVITY_PAUSED'
        else component_events.type
        END as eventType
        FROM component_events
        INNER JOIN components ON component_events.component_id=components._id
        INNER JOIN packages ON components.package_id=packages._id
        ORDER BY timestamp
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Wellbeing URL events')
            report.start_artifact_report(report_folder, 'URL Events')
            report.add_script()
            data_headers = ('Timestamp', 'Event ID', 'Package ID',
                            'Package Name', 'Website', 'Event')
            data_list = []
            for row in all_rows:
                data_list.append(
                    (row[0], row[1], row[2], row[3], row[4], row[5]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'wellbeing - URL events'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Wellbeing - URL Events'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Wellbeing URL event data available')

        db.close()
        return
Example #27
def get_cloudkitServerSharedData(file_found, report_folder, seeker):
    user_dictionary = {}

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT Z_PK, ZSERVERSHAREDATA 
    FROM
    ZICCLOUDSYNCINGOBJECT
    WHERE
    ZSERVERSHAREDATA NOT NULL
    ''')

    all_rows = cursor.fetchall()
    for row in all_rows:
        
        filename = os.path.join(report_folder, 'zserversharedata_'+str(row[0])+'.bplist')
        with open(filename, "wb") as output_file:
            output_file.write(row[1])
        
        deserialized_plist = nd.deserialize_plist(io.BytesIO(row[1]))
        for item in deserialized_plist:
            if 'Participants' in item:
                for participant in item['Participants']:
                    record_id = participant['UserIdentity']['UserRecordID']['RecordName']
                    email_address = participant['UserIdentity']['LookupInfo']['EmailAddress']
                    phone_number = participant['UserIdentity']['LookupInfo']['PhoneNumber']
                    first_name = participant['UserIdentity']['NameComponents']['NS.nameComponentsPrivate']['NS.givenName']
                    middle_name = participant['UserIdentity']['NameComponents']['NS.nameComponentsPrivate']['NS.middleName']
                    last_name = participant['UserIdentity']['NameComponents']['NS.nameComponentsPrivate']['NS.familyName']
                    name_prefix = participant['UserIdentity']['NameComponents']['NS.nameComponentsPrivate']['NS.namePrefix']
                    name_suffix = participant['UserIdentity']['NameComponents']['NS.nameComponentsPrivate']['NS.nameSuffix']
                    nickname = participant['UserIdentity']['NameComponents']['NS.nameComponentsPrivate']['NS.nickname']
        
                    user_dictionary[record_id] = [record_id, email_address, phone_number, name_prefix, first_name, middle_name, last_name, name_suffix, nickname]
    db.close()

    # Build the array after dealing with all the files 
    user_list = list(user_dictionary.values())

    if len(user_list) > 0:
        description = 'CloudKit Participants - Cloudkit accounts participating in CloudKit shares.'
        report = ArtifactHtmlReport('Participants')
        report.start_artifact_report(report_folder, 'Participants', description)
        report.add_script()
        user_headers = ('Record ID','Email Address','Phone Number','Name Prefix','First Name','Middle Name','Last Name','Name Suffix','Nickname')     
        report.write_artifact_data_table(user_headers, user_list, '', write_location=False)
        report.end_artifact_report()
        
        tsvname = 'Cloudkit Participants'
        tsv(report_folder, user_headers, user_list, tsvname)
    else:
        logfunc('No Cloudkit - Cloudkit Participants data available')
Example #28
def get_discreteNative(files_found, report_folder, seeker, wrap_text):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        #check if file is abx
        if checkabx(file_found):
            multi_root = False
            tree = abxread(file_found, multi_root)
        else:
            tree = ET.parse(file_found)
        root = tree.getroot()
    
        for elem in root:
            for subelem1 in elem:
                ptag = subelem1.tag
                ptagattrib = subelem1.attrib
                ptagattrib = ptagattrib["pn"]
                for subelem2 in subelem1:
                    otag = subelem2.tag
                    otagattrib = subelem2.attrib
                    otagattrib = otagattrib['op']
                    for subelem3 in subelem2:
                        atag = subelem3.tag
                        atagattrib = subelem3.attrib
                        atagattrib = atagattrib.get('at', '')
                        
                        for subelem4 in subelem3:
                            etag = subelem4.tag
                            etagattrib = subelem4.attrib
                            ntattrib = etagattrib.get('nt')
                            ndattrib = etagattrib.get('nd')
                            if ndattrib is None:
                                ndattrib = ''
                            else:
                                ndattrib = round(int(ndattrib) / 60, 1)
                            data_list.append((timestampcalc(ntattrib), ptagattrib, atagattrib, oplist(otagattrib), ndattrib))
                    
    if data_list:
        report = ArtifactHtmlReport('Privacy Dashboard')
        report.start_artifact_report(report_folder, 'Privacy Dashboard')
        report.add_script()
        data_headers = ('Timestamp', 'Bundle', 'Module', 'Operation', 'Usage in Seconds')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = f'Privacy Dashboard'
        tsv(report_folder, data_headers, data_list, tsvname)
        
        tlactivity = f'Privacy Dashboard'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Privacy Dashboard data available')
Example #29
def insert_sidebar_code(data, sidebar_code, filename):
    pos = data.find(body_sidebar_dynamic_data_placeholder)
    if pos < 0:
        logfunc(
            f'Error, could not find {body_sidebar_dynamic_data_placeholder} in file {filename}'
        )
        return data
    else:
        ret = data[0:pos] + sidebar_code + data[
            pos + len(body_sidebar_dynamic_data_placeholder):]
        return ret
Example #30
def _parse_chat_messages(messages_count, rows, report_folder, db_file_name):
    logfunc(f'{messages_count} messages found')

    data_headers = ('Timestamp', 'Thread Id', 'Thread Name', 'User Id',
                    'User Name', 'Message Text', 'Message Direction',
                    'Message Type', 'Attachment Name', 'Deleted')
    data_list = [(row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                  row[7], row[8] if row[8] else '', row[9]) for row in rows]

    _make_reports(f'{APP_NAME} - Chat', data_headers, data_list, report_folder,
                  db_file_name)