Ejemplo n.º 1
0
def get_kikBplistmeta(files_found, report_folder, seeker):
	"""Parse Kik attachment bplist metadata files and report their contents.

	Args:
		files_found: iterable of paths to Kik attachment bplist files.
		report_folder: destination folder for the HTML report and extracted thumbnails.
		seeker: file seeker object (unused here; part of the artifact plugin API).
	"""
	data_list = []
	for file_found in files_found:
		file_found = str(file_found)
		if os.path.isdir(file_found):
			continue
		# Reset all per-file fields; 'content_id' avoids shadowing builtin id().
		sha1org = sha1scaled = blockhash = appname = layout = allowforward = filesize = filename = thumb = appid = content_id = ''
		with open(file_found, 'rb') as f:
			plist = biplist.readPlist(f)
		for key, val in plist.items():
			if key == 'id':
				content_id = val
			elif key == 'hashes':
				for x in val:
					if x['name'] == 'sha1-original':
						sha1org = x.get('value', '')
					if x['name'] == 'sha1-scaled':
						sha1scaled = x.get('value', '')
					if x['name'] == 'blockhash-scaled':
						blockhash = x.get('value', '')
			elif key == 'string':
				for x in val:
					if x['name'] == 'app-name':
						appname = x.get('value', '')
					if x['name'] == 'layout':
						layout = x.get('value', '')
					if x['name'] == 'allow-forward':
						allowforward = x.get('value', '')
					if x['name'] == 'file-size':
						filesize = x.get('value', '')
					if x['name'] == 'file-name':
						filename = x.get('value', '')
			elif key == 'image':
				# Extract the embedded thumbnail; the original leaked this
				# file handle on write errors — 'with' guarantees closure.
				thumbfilename = content_id + '.jpg'
				with open(f'{report_folder}{thumbfilename}', "wb") as thumbfile:
					thumbfile.write(val[0]['value'])
				thumb = f'<img src="{report_folder}{thumbfilename}"  width="300"></img>'
			elif key == 'app-id':
				appid = val

		data_list.append((content_id, filename, filesize, allowforward, layout, appname, appid, sha1org, sha1scaled, blockhash, thumb))

	if len(data_list) > 0:
		head_tail = os.path.split(file_found)
		description = 'Metadata from Kik media directory. Source are bplist files.'
		report = ArtifactHtmlReport('Kik Attachments Bplist Metadata')
		report.start_artifact_report(report_folder, 'Kik Media Metadata', description)
		report.add_script()
		data_headers = ('Content ID ', 'Filename', 'File Size', 'Allow Forward', 'Layout','App Name','App ID', 'SHA1 Original','SHA1 Scaled','Blockhash Scaled', 'Internal Thumbnail')
		report.write_artifact_data_table(data_headers, data_list, head_tail[0],html_escape=False)
		report.end_artifact_report()

		tsvname = 'Kik Attachments Bplist Metadata'
		tsv(report_folder, data_headers, data_list, tsvname)
	else:
		# Fixed typo in original message ('MetadataD').
		logfunc('No data on Kik Attachments Bplist Metadata')
Ejemplo n.º 2
0
def get_appGrouplisting(files_found, report_folder, seeker):
    """Map app bundle IDs to their AppGroup container directory GUIDs.

    Reads each .com.apple.mobile_container_manager.metadata.plist and pairs
    the MCMMetadataIdentifier (bundle id) with the parent directory name
    (the AppGroup GUID).
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        plist = biplist.readPlist(file_found)
        bundleid = plist['MCMMetadataIdentifier']

        p = pathlib.Path(file_found)
        appgroupid = p.parent.name

        data_list.append((bundleid, appgroupid))
        fileloc = str(p.parents[1])

    if len(data_list) > 0:

        description = 'List can included once installed but not present apps. Each file is named .com.apple.mobile_container_manager.metadata.plist'
        report = ArtifactHtmlReport('Bundle ID - AppGroup ID')
        report.start_artifact_report(report_folder, 'Bundle ID - AppGroup ID',
                                     description)
        report.add_script()
        data_headers = ('Bundle ID', 'AppGroup')
        report.write_artifact_data_table(data_headers, data_list, fileloc)
        report.end_artifact_report()

        tsvname = 'Bundle ID - AppGroup ID'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        # Original logged only the artifact title, which reads like success;
        # make the empty-result case explicit.
        logfunc('No Bundle ID - AppGroup ID data available')
Ejemplo n.º 3
0
def get_installedappsGass(files_found, report_folder, seeker):
    """Report installed application package names from the GMS app_info table."""
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT 
        package_name 
        FROM
        app_info  
    ''')

    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Installed Apps')
        report.start_artifact_report(report_folder, 'Installed Apps (GMS)')
        report.add_script()
        # Trailing comma keeps this a one-element tuple.
        data_headers = ('Bundle ID',)
        data_list = [(row[0],) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'installed apps - GMS'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Installed Apps data available')

    db.close()
    return
Ejemplo n.º 4
0
def get_lastBuild(files_found, report_folder, seeker):
    """Report iOS build information from the LastBuildInfo plist.

    Also records the iOS version in scripts.artifacts.artGlobals.versionf so
    other artifact parsers can branch on the OS version, and logs key device
    facts via logdevinfo.
    """
    data_list = []
    file_found = str(files_found[0])
    with open(file_found, "rb") as fp:
        pl = plistlib.load(fp)
        for key, val in pl.items():
            data_list.append((key, val))
            if key == "ProductVersion":
                # Shared global consumed by other artifact modules.
                scripts.artifacts.artGlobals.versionf = val
                logfunc(f"iOS version: {val}")
                logdevinfo(f"iOS version: {val}")

            if key == "ProductBuildVersion":
                logdevinfo(f"ProductBuildVersion: {val}")

            if key == "ProductName":
                logfunc(f"Product: {val}")
                logdevinfo(f"Product: {val}")

    report = ArtifactHtmlReport('iOS Build')
    report.start_artifact_report(report_folder, 'Build Information')
    report.add_script()
    data_headers = ('Key', 'Values')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Last Build'
    tsv(report_folder, data_headers, data_list, tsvname)
Ejemplo n.º 5
0
def get_chrome(files_found, report_folder, seeker, wrap_text):
    """Extract browsing history from Chromium-style 'History' SQLite databases."""
    for file_found in files_found:
        file_found = str(file_found)
        # Only the main 'History' database matters; skip -journal and friends.
        if os.path.basename(file_found) != 'History':
            continue
        # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??
        if file_found.find('.magisk') >= 0 and file_found.find('mirror') >= 0:
            continue
        browser_name = get_browser_name(file_found)
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
            datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch"),
            url,
            title,
            visit_count,
            hidden
        from urls  
        ''')

        all_rows = cursor.fetchall()
        if not all_rows:
            logfunc(f'No {browser_name} history data available')
            db.close()
            continue

        report = ArtifactHtmlReport(f'{browser_name} History')
        # Pick a fresh report filename so a report from another History file
        # does not get overwritten.
        report_path = os.path.join(report_folder,
                                   f'{browser_name} History.temphtml')
        report_path = get_next_unused_name(report_path)[:-9]  # strip .temphtml
        report.start_artifact_report(report_folder,
                                     os.path.basename(report_path))
        report.add_script()
        data_headers = ('Last Visit Time', 'URL', 'Title', 'Visit Count',
                        'Hidden')
        if wrap_text:
            data_list = [(textwrap.fill(row[0], width=100), row[1],
                          row[2], row[3], row[4]) for row in all_rows]
        else:
            data_list = [(row[0], row[1], row[2], row[3], row[4])
                         for row in all_rows]
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'{browser_name} History'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'{browser_name} History'
        timeline(report_folder, tlactivity, data_list, data_headers)

        db.close()
Ejemplo n.º 6
0
def get_smembersEvents(files_found, report_folder, seeker):
    """Report device events captured by the Samsung Members app."""
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select 
    type, 
    value, 
    datetime(created_at /1000, "unixepoch"), 
    in_snapshot
    FROM device_events
    ''')

    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Samsung Members - Events')
        report.start_artifact_report(report_folder, 'Samsung Members - Events')
        report.add_script()
        data_headers = ('Type', 'Value', 'Created At', 'Snapshot?')
        data_list = [(row[0], row[1], row[2], row[3]) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'samsung members - events'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Samsung Members - Events data available')

    db.close()
    return
Ejemplo n.º 7
0
def read_sms_messages(db, report_folder, file_found):
    """Run the shared sms_query against an open db and report the messages."""
    cursor = db.cursor()
    cursor.execute(sms_query)
    rows = cursor.fetchall()
    if not rows:
        logfunc('No SMS messages found!')
        return

    report = ArtifactHtmlReport('SMS messages')
    report.start_artifact_report(report_folder, 'SMS messages')
    report.add_script()
    data_headers = ('MSG ID', 'Thread ID', 'Address', 'Contact ID', 'Date',
                    'Date sent', 'Read', 'Type', 'Body', 'Service Center',
                    'Error code')
    # Column names in query-result order, matching data_headers.
    columns = ('msg_id', 'thread_id', 'address', 'person', 'date',
               'date_sent', 'read', 'type', 'body', 'service_center',
               'error_code')
    data_list = [tuple(row[col] for col in columns) for row in rows]

    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = f'sms messages'
    tsv(report_folder, data_headers, data_list, tsvname)
Ejemplo n.º 8
0
def get_etc_hosts(files_found, report_folder, seeker, wrap_text):
    """Report non-default entries from the /etc/hosts file.

    Default localhost mappings are filtered out so only anomalous entries
    (possible ad-blocking or malware redirects) appear in the report.
    """
    data_list = []
    file_found = str(files_found[0])

    with codecs.open(file_found, 'r', 'utf-8-sig') as csvfile:
        for row in csvfile:
            sline = '\t'.join(row.split())
            sline = sline.split('\t')
            # Blank lines and comment-only lines yield fewer than two
            # fields; the original crashed with IndexError on them.
            if len(sline) < 2:
                continue
            sline_one = sline[0]
            sline_two = sline[1]
            if (sline_one == '127.0.0.1' and sline_two == 'localhost') or \
                (sline_one == '::1' and sline_two == 'ip6-localhost'):
                pass  # Skipping the defaults, so only anomaly entries are seen
            else:
                data_list.append((sline_one, sline_two))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Etc Hosts')
        report.start_artifact_report(report_folder, f'Etc Hosts')
        report.add_script()
        data_headers = ('IP Address', 'Hostname')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Etc Hosts'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc(f'No etc hosts file available, or nothing significant found.')
Ejemplo n.º 9
0
def get_queryPredictions(files_found, report_folder, seeker):
    """Report query prediction messages with creation timestamps."""
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(creationTimestamp, "UNIXEPOCH") as START, 
    content,
    isSent,
    conversationId,
    id,
    uuid
    from messages 
    ''')
    all_rows = cursor.fetchall()
    if all_rows:
        data_list = [(row[0], row[1], row[2], row[3], row[4], row[5])
                     for row in all_rows]

        report = ArtifactHtmlReport('Query Predictions')
        report.start_artifact_report(report_folder, 'Query Predictions')
        report.add_script()
        data_headers = ('Timestamp', 'Content', 'Is Sent?',
                        'Conversation ID', 'ID', 'UUID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Query Predictions'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

    db.close()
    return
Ejemplo n.º 10
0
def get_appleMapsApplication(files_found, report_folder, seeker):
    """Report the last Apple Maps camera coordinates.

    Decodes the protobuf blob stored under __internal__LastActivityCamera in
    the Apple Maps application plist using blackboxprotobuf with an explicit
    field-type map (fields 1 and 2 are latitude/longitude doubles).
    """
    file_found = str(files_found[0])

    with open(file_found, 'rb') as f:
        deserialized_plist = plistlib.load(f)

        # Protobuf field numbers -> types/names for blackboxprotobuf.
        types = {'1': {'type': 'double', 'name': 'Latitude'},
                '2': {'type': 'double', 'name': 'Longitude'},
                '3': {'type': 'double', 'name': ''},
                '4': {'type': 'fixed64', 'name': ''},
                '5': {'type': 'double', 'name': ''}
                }

        internal_deserialized_plist, di = blackboxprotobuf.decode_message((deserialized_plist['__internal__LastActivityCamera']), types)
        latitude = internal_deserialized_plist['Latitude']
        longitude = internal_deserialized_plist['Longitude']

        data_list = [(latitude, longitude)]
        report = ArtifactHtmlReport('Apple Maps App')
        report.start_artifact_report(report_folder, 'Apple Maps App')
        report.add_script()
        data_headers = ('Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Apple Maps Application'
        tsv(report_folder, data_headers, data_list, tsvname)
Ejemplo n.º 11
0
def get_appLockerfishingnetdb(files_found, report_folder, seeker, wrap_text):
    """Report the presence of the encrypted Calculator 'locker' database.

    The database itself is encrypted; this artifact emits a static note per
    located file with decryption instructions and the known static key.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        message = 'The located database is encrypted. It contains information regarding the source directory of the encrypted files, timestamp metadata, and original filenames.'
        decryptioninst = 'To decrypt follow the instructions at the following URL: https://theincidentalchewtoy.wordpress.com/2021/12/07/decrypting-the-calculator-apps/'
        keytodecrypt = 'Rny48Ni8aPjYCnUI'

        data_list.append((message, decryptioninst, keytodecrypt))

    # Build the report once after the loop — the original regenerated the
    # whole report on every file iteration.
    if data_list:
        report = ArtifactHtmlReport('Calculator Locker Database')
        report.start_artifact_report(report_folder,
                                     'Calculator Locker Database')
        report.add_script()
        data_headers = ('Encrypted Pattern', 'Decrypted Pattern',
                        'Key To Decrypt')
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Media'])
        report.end_artifact_report()

        tsvname = f'Calculator Locker Database data'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Calculator Locker Database data available')
Ejemplo n.º 12
0
def get_celWireless(files_found, report_folder, seeker):
    """Report cellular identifiers from the commcenter plists.

    Logs key device identifiers (phone number, ICCID, IMEI, MEID) via
    logdevinfo and lists every key/value pair with its source file.
    """
    data_list = []
    for filepath in files_found:
        basename = os.path.basename(filepath)
        if (basename == "com.apple.commcenter.device_specific_nobackup.plist"
                or basename == "com.apple.commcenter.plist"):
            # Context manager — the original leaked this file handle.
            with open(filepath, "rb") as p:
                plist = plistlib.load(p)
            for key, val in plist.items():
                data_list.append((key, val, filepath))
                if key == "ReportedPhoneNumber":
                    logdevinfo(f"Reported Phone Number: {val}")

                if key == "CDMANetworkPhoneNumberICCID":
                    logdevinfo(f"CDMA Network Phone Number ICCID: {val}")

                if key == "imei":
                    logdevinfo(f"IMEI: {val}")

                if key == "LastKnownICCID":
                    logdevinfo(f"Last Known ICCID: {val}")

                if key == "meid":
                    logdevinfo(f"MEID: {val}")

    location = 'see source field'
    report = ArtifactHtmlReport('Cellular Wireless')
    report.start_artifact_report(report_folder, 'Cellular Wireless')
    report.add_script()
    data_headers = ('Key', 'Values', 'Source')
    report.write_artifact_data_table(data_headers, data_list, location)
    report.end_artifact_report()

    tsvname = 'Cellular Wireless'
    tsv(report_folder, data_headers, data_list, tsvname)
Ejemplo n.º 13
0
def get_smanagerCrash(files_found, report_folder, seeker):
    """Report app crash records from the Samsung Smart Manager database."""
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    package_name,
    datetime(crash_time / 1000, "unixepoch")
    from crash_info
    ''')

    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Samsung Smart Manager - Crash')
        report.start_artifact_report(report_folder,
                                     'Samsung Smart Manager - Crash')
        report.add_script()
        data_headers = ('Package Name', 'Timestamp')
        data_list = [(row[0], row[1]) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'samsung smart manager - crash'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Samsung Smart Manager - Crash data available')

    db.close()
    return
Ejemplo n.º 14
0
def get_filesAppsm(files_found, report_folder, seeker):
    """Parse the Files app smartfolders.db FILENAMES table.

    Each row carries a serialized bplist blob of file metadata; the blob is
    dumped beside the report, deserialized, and key fields are reported.
    """
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('smartfolders.db'):
            break

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT * 
    FROM
    FILENAMES
    ''')

    all_rows = cursor.fetchall()
    data_list = []
    if len(all_rows) > 0:
        for row in all_rows:
            # Per-row defaults: the original left these unbound when a key
            # was absent and let values leak over from the previous row.
            creationdate = contentmodificationdate = flags = userinfo = childitemcount = ''

            bplist_path = os.path.join(report_folder, row[2] + '.bplist')
            with open(bplist_path, "wb") as output_file:
                output_file.write(row[1])

            with open(bplist_path, "rb") as f:
                deserialized_plist = nd.deserialize_plist(f)
            for x, y in deserialized_plist.items():
                if x == '_creationDate':
                    creationdate = y
                if x == '_contentModificationDate':
                    contentmodificationdate = y
                if x == '_flags':
                    flags = y
                if x == '_userInfo':
                    userinfo = y
                if x == '_childItemCount':
                    childitemcount = y
            lasthitdate = datetime.datetime.fromtimestamp(row[3])

            data_list.append((lasthitdate, row[0], row[2], row[4], creationdate, contentmodificationdate, userinfo, childitemcount, flags))

        # Build the report once, after collecting all rows — the original
        # regenerated the full report on every row iteration.
        description = 'Files App - Files stored in the "On my iPad" area.'
        report = ArtifactHtmlReport('Files App - Filenames')
        report.start_artifact_report(report_folder, 'Files App - Filenames', description)
        report.add_script()
        data_headers = ('Last Hit Date','Folder ID','Filename','Frequency at Las Hit Date','Creation Date','Modification Date','User Info','Child Item Count','Flags' )
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Files App - Filenames'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Files App - Filenames'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Files App - Filenames data available')

    db.close()
Ejemplo n.º 15
0
def get_medicalID(files_found, report_folder, seeker):
    """Report user-entered Medical ID health information."""
    data_list = []
    file_found = str(files_found[0])
    with open(file_found, 'rb') as f:
        deserialized_plist = nd.deserialize_plist(f)
        for key, value in deserialized_plist.items():
            key_name = get_name(key)
            if isinstance(value, dict):
                # Dict values carry a number plus an optional HK unit string.
                unit = value.get('UnitKey', {}).get('HKUnitStringKey', '')
                val = str(value.get('ValueKey', ''))
                data_list.append((key_name, f'{val} {unit}' if unit else val))
            elif isinstance(value, list):
                # Lists have not been observed in practice; stringify as-is.
                data_list.append((key_name, str(value)))
            else:
                data_list.append((key_name, value))

    if data_list:
        description = 'User entered Medical information about self'
        report = ArtifactHtmlReport('Medical ID')
        report.start_artifact_report(report_folder, 'Health Info', description)
        report.add_script()
        data_headers = ('Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Medical ID'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Medical ID'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No data on Medical ID')
Ejemplo n.º 16
0
def get_tileAppDb(files_found, report_folder, seeker):
    """Report Tile tracker information and last-known geolocation.

    Joins ZTILENTITY_NODE with ZTILENTITY_TILESTATE to pair each tile with
    its latest state (coordinates, lost flag, community connection time).
    Core Data timestamps are offset by 31 years to the Unix epoch.
    """
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('tile-TileNetworkDB.sqlite'):
            break

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(ZTIMESTAMP,'unixepoch','31 years'),
    ZNAME,
    datetime(ZACTIVATION_TIMESTAMP,'unixepoch','31 years'),
    datetime(ZREGISTRATION_TIMESTAMP,'unixepoch','31 years'),
    ZALTITUDE, 
    ZLATITUDE, 
    ZLONGITUDE,
    ZID,
    ZNODE_TYPE, 
    ZSTATUS,
    ZIS_LOST,
    datetime(ZLAST_LOST_TILE_COMMUNITY_CONNECTION,'unixepoch','31 years')
    FROM ZTILENTITY_NODE INNER JOIN ZTILENTITY_TILESTATE ON ZTILENTITY_NODE.ZTILE_STATE = ZTILENTITY_TILESTATE.Z_PK
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11]))

        # Build the report once, after the loop — the original regenerated
        # the entire report on every row.
        description = ''
        report = ArtifactHtmlReport(
            'Tile App - Tile Information & Geolocation')
        report.start_artifact_report(report_folder,
                                     'Tile App DB Info & Geolocation',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Tile Name', 'Activation Timestamp',
                        'Registration Timestamp', 'Altitude', 'Latitude',
                        'Longitude', 'Tile ID', 'Tile Type', 'Status',
                        'Is Lost?', 'Last Community Connection')
        report.write_artifact_data_table(data_headers, data_list,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Tile App DB Info Geolocation'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Tile App DB Info Geolocation'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Tile App DB data available')

    db.close()
    return
Ejemplo n.º 17
0
def process_accounts_ce(folder, uid, report_folder):
    """Report account name/type/password triples from an accounts_ce db."""
    db = sqlite3.connect(folder)
    cursor = db.cursor()

    # Query to create report
    cursor.execute('''
    SELECT
        name,
        type,
        password
    FROM
    accounts
    ''')
    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Accounts_ce')
        report.start_artifact_report(report_folder, f'accounts_ce_{uid}')
        report.add_script()
        data_headers = ('Name', 'Type', 'Password')
        data_list = [(row[0], row[1], row[2]) for row in all_rows]
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()

        tsvname = f'accounts ce {uid}'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc(f'No accounts_ce_{uid} data available')
    db.close()
Ejemplo n.º 18
0
def get_aggDictpasscode(files_found, report_folder, seeker):
    """Report daily passcode-state values from the Aggregate Dictionary.

    Filters SCALARS for com.apple.passcode.NumPasscode* keys; DAYSSINCE1970
    is converted to a calendar date.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
	DATE(DAYSSINCE1970*86400, 'unixepoch') AS DAY,
	KEY AS "KEY",
	VALUE AS "VALUE"
	FROM
	SCALARS
	where key like 'com.apple.passcode.NumPasscode%'
	""")

    all_rows = cursor.fetchall()
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2]))

    description = ''
    report = ArtifactHtmlReport('Aggregate Dictionary Passcode State')
    report.start_artifact_report(report_folder, 'Passcode State', description)
    report.add_script()
    data_headers = ('Day', 'Key', 'Value')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Agg Dict Dictionary Passcode State'
    tsv(report_folder, data_headers, data_list, tsvname)

    # Close the connection — the original leaked it, unlike the sibling
    # artifact functions in this module.
    db.close()
Ejemplo n.º 19
0
def get_pSettings(files_found, report_folder, seeker, wrap_text):
    """Report name/value pairs from the Google partner settings database."""
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select 
    name,
    value
    from partner
    ''')

    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Partner Settings')
        report.start_artifact_report(report_folder, 'Partner Settings')
        report.add_script()
        data_headers = ('Name', 'Value')
        data_list = [(row[0], row[1]) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'partner settings'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Partner Settings data available')

    db.close()
    return
Ejemplo n.º 20
0
def get_appGrouplisting(files_found, report_folder, seeker):
    """Map bundle IDs to AppGroup / PluginKit container directories."""
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        with open(file_found, "rb") as fp:
            # biplist is only required before the Python 3.9 plistlib rework.
            if sys.version_info >= (3, 9):
                plist = plistlib.load(fp)
            else:
                plist = biplist.readPlist(fp)

            p = pathlib.Path(file_found)
            bundleid = plist['MCMMetadataIdentifier']
            # parent = container GUID dir; parents[1] = type dir (AppGroup etc.)
            data_list.append((bundleid,
                              str(p.parents[1].name),
                              p.parent.name,
                              str(p.parents[1])))

    if data_list:
        filelocdesc = 'Path column in the report'
        description = 'List can included once installed but not present apps. Each file is named .com.apple.mobile_container_manager.metadata.plist'
        report = ArtifactHtmlReport('Bundle ID by AppGroup & PluginKit IDs')
        report.start_artifact_report(report_folder,
                                     'Bundle ID by AppGroup & PluginKit IDs',
                                     description)
        report.add_script()
        data_headers = ('Bundle ID', 'Type', 'Directory GUID', 'Path')
        report.write_artifact_data_table(data_headers, data_list, filelocdesc)
        report.end_artifact_report()

        tsvname = 'Bundle ID - AppGroup ID - PluginKit ID'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data on Bundle ID - AppGroup ID - PluginKit ID')
Ejemplo n.º 21
0
def get_safariRecentWebSearches(files_found, report_folder, seeker):
    """Report Safari's recent web search terms and their timestamps."""
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        with open(file_found, "rb") as fp:
            try:
                # biplist only needed before the Python 3.9 plistlib rework.
                if sys.version_info >= (3, 9):
                    plist = plistlib.load(fp)
                else:
                    plist = biplist.readPlist(fp)
            except (biplist.InvalidPlistException, plistlib.InvalidFileException) as ex:
                logfunc(f'Failed to read plist {file_found} ' + str(ex))
                continue
            for search in plist.get('RecentWebSearches', []):
                data_list.append((search.get('Date', ''),
                                  search.get('SearchString', '')))

    if data_list:
        report = ArtifactHtmlReport('Safari Recent WebSearches')
        report.start_artifact_report(report_folder, 'Recent WebSearches')
        report.add_script()
        data_headers = ('Date', 'Search Term')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Recent WebSearches'
        tsv(report_folder, data_headers, data_list, tsvname)
        timeline(report_folder, tsvname, data_list, data_headers)
    else:
        logfunc('No data for recent web searches')
Ejemplo n.º 22
0
def get_vlcThumbs(files_found, report_folder, seeker, wrap_text):
    """Copy VLC thumbnail files into the report folder and report them.

    Each thumbnail is copied beside the report and rendered inline via an
    unescaped <img> tag.
    """
    data_list = []
    path_to_files = ''
    for file_found in files_found:
        file_found = str(file_found)

        data_file_real_path = file_found
        shutil.copy2(data_file_real_path, report_folder)
        data_file_name = os.path.basename(data_file_real_path)
        thumb = f'<img src="{report_folder}/{data_file_name}"></img>'

        data_list.append((data_file_name, thumb))
        path_to_files = os.path.dirname(data_file_real_path)

    # Guard the empty case — the original raised NameError on
    # data_file_real_path when no files were found, and emitted an
    # empty report otherwise.
    if data_list:
        description = 'VLC Thumbnails'
        report = ArtifactHtmlReport('VLC Thumbnails')
        report.start_artifact_report(report_folder, 'VLC Thumbnails', description)
        report.add_script()
        data_headers = ('Filename', 'Thumbnail' )
        report.write_artifact_data_table(data_headers, data_list, path_to_files, html_escape=False)
        report.end_artifact_report()

        tsvname = 'VLC Thumbnails'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No VLC Thumbnails data available')
Ejemplo n.º 23
0
def get_cacheRoutesGmap(files_found, report_folder, seeker):
    """Extract cached route coordinates from Google Maps NSKeyedArchiver plists.

    Every entry of each plist's '$objects' array is probed for the
    '_coordinateLat' / '_coordinateLong' keys; objects without them
    (strings, other archived classes) are skipped.

    Fix: the original used a bare `except: pass`, which also swallowed
    KeyboardInterrupt/SystemExit; only the expected lookup failures
    (missing key -> KeyError, non-dict object -> TypeError) are ignored now.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, 'rb') as f:
            deserialized = plistlib.load(f)
            for obj in deserialized['$objects']:
                try:
                    lat = obj['_coordinateLat']
                    lon = obj['_coordinateLong']  #lat longs
                except (KeyError, TypeError):
                    # Not a coordinate-bearing archived object; skip it.
                    continue
                data_list.append((file_found, lat, lon))

    if len(data_list) > 0:
        description = 'Google Maps Cache Routes'
        report = ArtifactHtmlReport('Locations')
        report.start_artifact_report(report_folder, 'Google Maps Cache Routes',
                                     description)
        report.add_script()
        data_headers = ('Source File', 'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Google Maps Cache Routes'
        tsv(report_folder, data_headers, data_list, tsvname)
Ejemplo n.º 24
0
def get_etc_hosts(files_found, report_folder, seeker, wrap_text):
    """Parse an /etc/hosts file and report IP-address-to-hostname mappings.

    Fix: the original indexed `sline[1]` unconditionally, so any blank,
    comment, or malformed line raised IndexError; such lines are skipped now.
    Extra hostname aliases after the first are kept, space-separated, instead
    of being silently dropped.
    """
    data_list = []
    file_found = str(files_found[0])

    with codecs.open(file_found, 'r', 'utf-8-sig') as csvfile:
        for row in csvfile:
            fields = row.split()
            # Skip blanks, comment lines, and lines with fewer than two fields.
            if len(fields) < 2 or fields[0].startswith('#'):
                continue
            data_list.append((fields[0], ' '.join(fields[1:])))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Etc Hosts')
        report.start_artifact_report(report_folder, f'Etc Hosts')
        report.add_script()
        data_headers = ('IP Address', 'Hostname')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Etc Hosts'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc(f'No etc hosts file available')
Ejemplo n.º 25
0
def get_chromeNetworkActionPredictor(files_found, report_folder, seeker,
                                     wrap_text):
    """Report Chrome-family Network Action Predictor (omnibox typing) data.

    One report is produced per 'Network Action Predictor' database found.

    Fix: the original ended the loop body with `return`, so only the first
    matching database was ever processed, even though the code deliberately
    picks unused report names (get_next_unused_name) to support several
    browser profiles; the early return is removed.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('Network Action Predictor'):
            continue  # Skip all other files

        browser_name = get_browser_name(file_found)
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
        user_text,
        url,
        number_of_hits,
        number_of_misses
        from network_action_predictor
        ''')

        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport(
                f'{browser_name} - Network Action Predictor')
            # Pick an unused name so a report from another profile's
            # database does not get overwritten.
            report_path = os.path.join(
                report_folder,
                f'{browser_name} - Network Action Predictor.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('User Text', 'URL', 'Number of Hits',
                            'Number of Misses')
            data_list = [(row[0], row[1], row[2], row[3])
                         for row in all_rows]

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} - Network Action Predictor'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc(
                f'No {browser_name} - Network Action Predictor data available')

        db.close()
Ejemplo n.º 26
0
def get_iTunesBackupInfo(files_found, report_folder, seeker):
    """Report device metadata from an iTunes backup Info.plist.

    Scalar values (strings, ints, datetimes) are reported key/value; key
    device identifiers are also written to the device-info log, and the iOS
    version is stored for other artifact parsers to branch on.

    Cleanup: removed the unused `versionnum` local, collapsed the chained
    isinstance calls into the tuple form, and dropped the redundant parens
    around 'Product Version'.
    """
    data_list = []
    file_found = str(files_found[0])
    with open(file_found, "rb") as fp:
        pl = plistlib.load(fp)
        for key, val in pl.items():
            if isinstance(val, (str, int, datetime.datetime)):
                data_list.append((key, val))
                if key in ('Build Version', 'Device Name', 'ICCID', 'IMEI',
                           'Last Backup Date', 'MEID', 'Phone Number',
                           'Product Name', 'Product Type', 'Product Version',
                           'Serial Number'):
                    logdevinfo(f"{key}: {val}")

                if key == 'Product Version':
                    # Other artifact modules read this global to branch on
                    # iOS version.
                    scripts.artifacts.artGlobals.versionf = val
                    logfunc(f"iOS version: {val}")

            elif key == "Installed Applications":
                data_list.append((key, ', '.join(val)))

    report = ArtifactHtmlReport('iTunes Backup')
    report.start_artifact_report(report_folder, 'iTunes Backup Information')
    report.add_script()
    data_headers = ('Key', 'Values')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'iTunes Backup'
    tsv(report_folder, data_headers, data_list, tsvname)
Ejemplo n.º 27
0
def get_keyboardAppUsage(files_found, report_folder, seeker):
    """Report per-application keyboard usage parsed from plist files.

    Each plist maps an application name to a list of usage entries carrying
    a start date, total application time, and a list of keyboard times.
    """
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)
        with open(file_found, "rb") as plist_file:
            plist_content = plistlib.load(plist_file)
            for app, entries in plist_content.items():
                for entry in entries:
                    keyboard_times = ', '.join(
                        str(t) for t in entry['keyboardTimes'])
                    data_list.append((entry['startDate'], app,
                                      entry['appTime'], keyboard_times))

    if data_list:
        report = ArtifactHtmlReport('Keyboard Application Usage')
        report.start_artifact_report(report_folder,
                                     'Keyboard Application Usage')
        report.add_script()
        data_headers = ('Date', 'Application Name',
                        'Application Time Used in Seconds',
                        'Keyboard Times Used in Seconds')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Keyboard Application Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Keyboard Application Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Keyboard Application Usage found')
Ejemplo n.º 28
0
def get_cmh(files_found, report_folder, seeker):
    """Report geodata (timestamp, lat/lon, address, URI) from a Samsung CMH
    (Content Media Hub) database's location_view.

    Fix: the original opened the evidence database with a plain
    sqlite3.connect (read-write); a SQLite URI with mode=ro opens it
    read-only so the evidence can never be modified.
    """
    file_found = str(files_found[0])
    db = sqlite3.connect(f'file:{file_found}?mode=ro', uri=True)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime(datetaken /1000, "unixepoch") as times,
        latitude,
        longitude,
        address_text,
        uri,
        _data
    FROM location_view
    ''')
    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Samsung CMH')
        report.start_artifact_report(report_folder, f'Geodata')
        report.add_script()
        data_headers = ('Data Taken', 'Latitude', 'Longitude', 'Address',
                        'URI', 'Data Location')
        data_list = [tuple(row) for row in all_rows]
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Samsung CMH Geodata'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc(f'No Samsung_CMH_GeoData available')
    db.close()
    return
Ejemplo n.º 29
0
def get_Zapya(files_found, report_folder, seeker, wrap_text):
    """Report file transfers recorded by the Zapya file-sharing app.

    Fix: the header tuple mixed capitalization ('Device', 'Name',
    'direction', 'createtime', ...) and carried a stale comment about a
    single-element tuple copied from a template; headers are now
    consistently title-cased.
    """
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT device, name, direction, createtime, path, title FROM transfer
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Zapya')
        report.start_artifact_report(report_folder, 'Zapya')
        report.add_script()
        data_headers = ('Device', 'Name', 'Direction', 'Create Time',
                        'Path', 'Title')
        data_list = [tuple(row) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Zapya'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Zapya'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Zapya data available')

    db.close()
    return
Ejemplo n.º 30
0
def get_factory_reset(files_found, report_folder, seeker, wrap_text):
    """Report the device factory-reset time from the factory_reset file.

    The file's content is not used; its last-modification time is taken as
    the moment the factory reset occurred.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('factory_reset'):
            continue  # Skip all other files

        file_name = 'factory_reset'

        # Modification time of the marker file == reset timestamp (UTC).
        mod_time_epoch = os.path.getmtime(file_found)
        reset_time = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.gmtime(mod_time_epoch))

        logdevinfo(f"Factory Reset Timestamp: {reset_time}")
        data_list = [(reset_time, file_name)]

        if data_list:
            report = ArtifactHtmlReport('Factory Reset')
            report.start_artifact_report(report_folder, 'Factory Reset')
            report.add_script()
            data_headers = ('Timestamp', 'File Name')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'Factory Reset'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Factory Reset'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Factory Reset data available')