def get_runtimePerms(files_found, report_folder, seeker, wrap_text):
    run = 0
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)
        data_list = []
        run = run + 1
        err = 0
        user = ''  # default when the path does not match a known layout

        parts = file_found.split(slash)
        if 'mirror' in parts:
            user = 'mirror'
        elif 'system' in parts:
            user = parts[-2]
        elif 'misc_de' in parts:
            user = parts[-4]

        if user == 'mirror':
            # Skip the Magisk mirror copy; it duplicates the real data
            continue
        else:
            try:
                ET.parse(file_found)
            except ET.ParseError:
                logfunc('Parse error - Non XML file.')
                err = 1

            if err == 0:
                tree = ET.parse(file_found)
                root = tree.getroot()

                for elem in root:
                    usagetype = elem.tag
                    name = elem.attrib['name']
                    for subelem in elem:
                        permission = subelem.attrib['name']
                        granted = subelem.attrib['granted']
                        flags = subelem.attrib['flags']
                        data_list.append((usagetype, name, permission, granted, flags))

        if len(data_list) > 0:
            report = ArtifactHtmlReport('Runtime Permissions')
            report.start_artifact_report(report_folder, f'Runtime Permissions_{user}')
            report.add_script()
            data_headers = ('Type', 'Name', 'Permission', 'Granted?', 'Flag')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'Runtime Permissions_{user}'
            tsv(report_folder, data_headers, data_list, tsvname)
def get_roles(files_found, report_folder, seeker, wrap_text):
    run = 0
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)
        data_list = []
        run = run + 1
        err = 0
        user = ''
        ver = ''

        parts = file_found.split(slash)
        if 'mirror' in parts:
            user = 'mirror'
        elif 'users' in parts:
            user = parts[-2]
            ver = 'Android 10'
        elif 'misc_de' in parts:
            user = parts[-4]
            ver = 'Android 11'

        if user == 'mirror':
            # Skip the Magisk mirror copy; it duplicates the real data
            continue
        else:
            try:
                ET.parse(file_found)
            except ET.ParseError:
                logfunc('Parse error - Non XML file.')
                err = 1

            if err == 0:
                tree = ET.parse(file_found)
                root = tree.getroot()

                for elem in root:
                    holder = ''
                    role = elem.attrib['name']
                    for subelem in elem:
                        holder = subelem.attrib['name']
                        data_list.append((role, holder))

        if len(data_list) > 0:
            report = ArtifactHtmlReport('App Roles')
            report.start_artifact_report(report_folder, f'{ver} Roles_{user}')
            report.add_script()
            data_headers = ('Role', 'Holder')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'App Roles_{user}'
            tsv(report_folder, data_headers, data_list, tsvname)
def get_settingsSecure(files_found, report_folder, seeker, wrap_text):
    slash = '\\' if is_platform_windows() else '/'

    # Filter for path xxx/yyy/system_ce/0
    for file_found in files_found:
        file_found = str(file_found)
        parts = file_found.split(slash)
        uid = parts[-2]
        try:
            uid_int = int(uid)
            # Skip sbin/.magisk/mirror/data/system_de/0 ; it should be duplicate data
            if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
                continue
            process_ssecure(file_found, uid, report_folder)
        except ValueError:
            pass  # uid was not a number
def get_recentactivity(files_found, report_folder, seeker):
    slash = '\\' if is_platform_windows() else '/'

    # Filter for path xxx/yyy/system_ce/0
    for file_found in files_found:
        file_found = str(file_found)
        parts = file_found.split(slash)
        if len(parts) > 2 and parts[-2] == 'system_ce':
            uid = parts[-1]
            try:
                uid_int = int(uid)
                # Skip sbin/.magisk/mirror/data/system_ce/0 ; it should be duplicate data
                if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
                    continue
                process_recentactivity(file_found, uid, report_folder)
            except ValueError:
                pass  # uid was not a number
def get_usagestats(files_found, report_folder, seeker):
    logfunc('Android Usagestats XML & Protobuf Parser')
    logfunc('By: @AlexisBrignoni & @SwiftForensics')
    logfunc('Web: abrignoni.com & swiftforensics.com')

    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)
        parts = file_found.split(slash)
        if len(parts) > 2 and parts[-2] == 'usagestats':
            uid = parts[-1]
            try:
                uid_int = int(uid)
                # Skip /sbin/.magisk/mirror/data/system/usagestats/0/ ; it should be duplicate data
                if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
                    continue
                process_usagestats(file_found, uid, report_folder)
            except ValueError:
                pass  # uid was not a number
def copyAttachments(rec):
    # Nested helper: relies on seeker and report_folder from an enclosing scope.
    pathToAttachment = None
    if rec["FILENAME"]:
        attachment = seeker.search('**' + rec["FILENAME"].replace('~', '', 1), return_on_first_hit=True)
        if not attachment:
            logfunc(' [!] Unable to extract attachment file: "{}"'.format(rec['FILENAME']))
            return
        if is_platform_windows():
            destFileName = sanitize_file_name(os.path.basename(rec["FILENAME"]))
        else:
            destFileName = os.path.basename(rec["FILENAME"])
        pathToAttachment = os.path.join(os.path.basename(os.path.abspath(report_folder)), destFileName)
        shutil.copy(attachment[0], os.path.join(report_folder, destFileName))
    return pathToAttachment
import os
import struct
import datetime
from html import escape

import scripts.blackboxprotobuf as blackboxprotobuf
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, is_platform_windows

is_windows = is_platform_windows()
slash = '\\' if is_windows else '/'


class Session:
    '''Represents a search query session'''

    def __init__(self, source_file, file_last_mod_date, session_type, session_from, session_queries, mp3_path):
        self.source_file = source_file
        self.file_last_mod_date = file_last_mod_date
        self.session_type = session_type
        self.session_from = session_from
        self.session_queries = session_queries
        self.mp3_path = mp3_path


def ReadUnixTime(unix_time):  # Unix timestamp is seconds since the epoch, 1970-01-01
    '''Returns a datetime object, or an empty string upon error'''
    if unix_time not in (0, None, ''):
        try:
            if isinstance(unix_time, str):
                unix_time = float(unix_time)
            return datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=unix_time)
        except (ValueError, OverflowError, TypeError) as ex:
            logfunc("ReadUnixTime() Failed to convert timestamp from value " + str(unix_time) + " Error was: " + str(ex))
    return ''
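# Usage sketch for ReadUnixTime() (illustrative only; not executed by the parser):
#   ReadUnixTime(1609459200)      -> datetime.datetime(2021, 1, 1, 0, 0)   # 2021-01-01 00:00:00 UTC
#   ReadUnixTime('1609459200.5')  -> datetime.datetime(2021, 1, 1, 0, 0, 0, 500000)
#   ReadUnixTime('not-a-number')  -> ''  (the failure is reported through logfunc)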
def get_applicationSnapshots(files_found, report_folder, seeker):
    slash = '\\' if is_platform_windows() else '/'
    data_headers = ('App Name', 'Source Path', 'Date Modified', 'Snapshot')
    data_list = []  # Format= [ [ 'App Name', 'ktx_path', mod_date, 'png_path' ], .. ]

    for file_found in files_found:
        file_found = str(file_found)
        if os.path.isdir(file_found):
            continue
        if file_found.lower().endswith('.ktx'):
            if os.path.getsize(file_found) < 2500:  # too small, they are blank
                continue
            parts = file_found.split(slash)
            if parts[-2] != 'downscaled':
                app_name = parts[-2].split(' ')[0]
            else:
                app_name = parts[-3].split(' ')[0]
            png_path = os.path.join(report_folder, app_name + '_' + parts[-1][:-4] + '.png')
            if save_ktx_to_png_if_valid(file_found, png_path):
                last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(file_found))
                data_list.append([app_name, file_found, last_modified_date, png_path])
        elif file_found.lower().endswith('.jpeg'):
            parts = file_found.split(slash)
            if parts[-2] != 'downscaled':
                app_name = parts[-2].split(' ')[0]
            else:
                app_name = parts[-3].split(' ')[0]
            if app_name.startswith('sceneID'):
                app_name = app_name[8:]
            dash_pos = app_name.find('-')
            if dash_pos > 0:
                app_name = app_name[0:dash_pos]
            jpg_path = os.path.join(report_folder, app_name + '_' + parts[-1])
            if shutil.copy2(file_found, jpg_path):
                last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(file_found))
                data_list.append([app_name, file_found, last_modified_date, jpg_path])

    if len(data_list):
        description = "Snapshots saved by iOS for individual apps appear here. Blank screenshots are excluded here. Dates and times shown are from file modified timestamps"
        report = ArtifactHtmlReport('App Snapshots (screenshots)')
        report.start_artifact_report(report_folder, 'App Snapshots', description)
        report.add_script()
        report_folder_name = os.path.basename(report_folder.rstrip(slash))

        data_list_for_report = []
        for app_name, ktx_path, mod_date, png_path in data_list:
            dir_path, base_name = os.path.split(png_path)
            img_html = '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid" style="max-height:300px; max-width:400px"></a>'.format(
                base_name, report_folder_name)
            data_list_for_report.append((app_name, ktx_path, mod_date, img_html))

        report.write_artifact_data_table(data_headers, data_list_for_report, '',
                                         html_escape=False, write_location=False)
        report.end_artifact_report()

        tsvname = 'App Snapshots'
        tsv_headers = ('App Name', 'Source Path', 'Date Modified')
        tsv(report_folder, tsv_headers, data_list, tsvname)
    else:
        logfunc('No snapshots available')
def write_artifact_data_table(self, data_headers, data_list, source_path,
                              write_total=True,
                              write_location=True,
                              html_escape=True,
                              cols_repeated_at_bottom=True,
                              table_responsive=True,
                              table_style='',
                              table_id='dtBasicExample',
                              html_no_escape=[]):
    ''' Writes info about data, then writes the table to the html file

        Parameters
        ----------
        data_headers    : List/Tuple of table column names
        data_list       : List/Tuple of lists/tuples which contain rows of data
        source_path     : Source path of data
        write_total     : Toggles whether to write out a line of total rows written
        write_location  : Toggles whether to write the location of the data source
        html_escape     : If True (default), then html special characters are encoded
        cols_repeated_at_bottom : If True (default), then col names are also at the bottom of the table
        table_responsive : If True (default), div class is table-responsive
        table_style     : Specify table style like "width: 100%;"
        table_id        : Specify an identifier string, which will be referenced in javascript
        html_no_escape  : If html_escape=True, list of columns not to escape
    '''
    if (not self.report_file):
        raise ValueError('Output report file is closed/unavailable!')

    num_entries = len(data_list)
    if write_total:
        self.write_minor_header(f'Total number of entries: {num_entries}', 'h6')
    if write_location:
        if is_platform_windows():
            source_path = source_path.replace('/', '\\')
        if source_path.startswith('\\\\?\\'):
            source_path = source_path[4:]
        self.write_lead_text(f'{self.artifact_name} located at: {source_path}')

    self.report_file.write('<br />')
    if table_responsive:
        self.report_file.write("<div class='table-responsive'>")

    table_head = '<table id="{}" class="table table-striped table-bordered table-xsm" cellspacing="0" {}>'\
                 '<thead>'.format(table_id, (f'style="{table_style}"') if table_style else '')
    self.report_file.write(table_head)
    self.report_file.write('<tr>' + ''.join(
        ('<th class="th-sm">{}</th>'.format(html.escape(str(x))) for x in data_headers)) + '</tr>')
    self.report_file.write('</thead><tbody>')

    if html_escape:
        for row in data_list:
            if html_no_escape:
                self.report_file.write('<tr>' + ''.join(
                    ('<td>{}</td>'.format(html.escape(str(x) if x not in [None, 'N/A'] else ''))
                     if h not in html_no_escape
                     else '<td>{}</td>'.format(str(x) if x not in [None, 'N/A'] else '')
                     for x, h in zip(row, data_headers))) + '</tr>')
            else:
                self.report_file.write('<tr>' + ''.join(
                    ('<td>{}</td>'.format(html.escape(str(x) if x not in [None, 'N/A'] else ''))
                     for x in row)) + '</tr>')
    else:
        for row in data_list:
            self.report_file.write('<tr>' + ''.join(
                ('<td>{}</td>'.format(str(x) if x not in [None, 'N/A'] else '')
                 for x in row)) + '</tr>')

    self.report_file.write('</tbody>')
    if cols_repeated_at_bottom:
        self.report_file.write('<tfoot><tr>' + ''.join(
            ('<th>{}</th>'.format(html.escape(str(x))) for x in data_headers)) + '</tr></tfoot>')
    self.report_file.write('</table>')
    if table_responsive:
        self.report_file.write("</div>")
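# A minimal, hedged usage sketch for write_artifact_data_table(), mirroring how the
# artifact plugins above call it (the header and variable names below are illustrative):
#
#   report = ArtifactHtmlReport('Runtime Permissions')
#   report.start_artifact_report(report_folder, 'Runtime Permissions_0')
#   report.add_script()
#   data_headers = ('Type', 'Name', 'Permission', 'Granted?', 'Flag')
#   report.write_artifact_data_table(data_headers, data_list, file_found)
#   report.end_artifact_report()
#
# Passing html_escape=False (as the snapshot and image reports do) lets cells that
# already contain <a>/<img> markup render as HTML instead of being escaped.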
def process_recentactivity(folder, uid, report_folder):
    slash = '\\' if is_platform_windows() else '/'

    db = sqlite3.connect(os.path.join(report_folder, 'RecentAct_{}.db'.format(uid)))
    cursor = db.cursor()
    # Create table recent.
    cursor.execute('''
    CREATE TABLE recent(task_id TEXT, effective_uid TEXT, affinity TEXT,
                        real_activity TEXT, first_active_time TEXT, last_active_time TEXT,
                        last_time_moved TEXT, calling_package TEXT, user_id TEXT,
                        action TEXT, component TEXT, snap TEXT, recimg TEXT,
                        fullat1 TEXT, fullat2 TEXT)
    ''')
    db.commit()
    err = 0

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)

    for filename in glob.iglob(os.path.join(folder, 'recent_tasks', '**'), recursive=True):
        if os.path.isfile(filename):  # filter dirs
            file_name = os.path.basename(filename)

            try:
                ET.parse(filename)
            except ET.ParseError:
                logfunc('Parse error - Non XML file? at: ' + filename)
                err = 1

            if err == 1:
                err = 0
                continue
            else:
                tree = ET.parse(filename)
                root = tree.getroot()

                for child in root:
                    # All attributes of the task element, kept as a JSON dump
                    fullat1 = json.dumps(root.attrib)
                    task_id = root.attrib.get('task_id')
                    effective_uid = root.attrib.get('effective_uid')
                    affinity = root.attrib.get('affinity')
                    real_activity = root.attrib.get('real_activity')
                    first_active_time = root.attrib.get('first_active_time')
                    last_active_time = root.attrib.get('last_active_time')
                    last_time_moved = root.attrib.get('last_time_moved')
                    calling_package = root.attrib.get('calling_package')
                    user_id = root.attrib.get('user_id')

                    # All attributes of the child element, kept as a JSON dump
                    fullat2 = json.dumps(child.attrib)
                    action = child.attrib.get('action')
                    component = child.attrib.get('component')
                    icon_image_path = root.attrib.get('task_description_icon_filename')

                    # Snapshot section picture
                    snapshot = task_id + '.jpg'

                    # Check for the image in the snapshots directory
                    check1 = os.path.join(folder, 'snapshots', snapshot)
                    isit1 = os.path.isfile(check1)
                    if isit1:
                        # Copy snapshot image to the report folder
                        shutil.copy2(check1, report_folder)
                        snap = snapshot
                    else:
                        snap = 'NO IMAGE'

                    # Recent_images section
                    if icon_image_path is not None:
                        recent_image = os.path.basename(icon_image_path)
                        check2 = os.path.join(folder, 'recent_images', recent_image)
                        isit2 = os.path.isfile(check2)
                        if isit2:
                            shutil.copy2(check2, report_folder)
                            recimg = recent_image
                        else:
                            recimg = 'NO IMAGE'
                    else:
                        # Check for other files not referenced in the XML - all types
                        check3 = glob.glob(os.path.join(folder, 'recent_images', task_id, '*.*'))
                        if check3:
                            check3 = check3[0]
                            isit3 = os.path.isfile(check3)
                        else:
                            isit3 = 0

                        if isit3:
                            shutil.copy2(check3, report_folder)
                            recimg = os.path.basename(check3)
                        else:
                            recimg = 'NO IMAGE'

                    # Insert all items into the database
                    cursor = db.cursor()
                    datainsert = (task_id, effective_uid, affinity, real_activity,
                                  first_active_time, last_active_time, last_time_moved,
                                  calling_package, user_id, action, component,
                                  snap, recimg, fullat1, fullat2,)
                    cursor.execute(
                        'INSERT INTO recent (task_id, effective_uid, affinity, real_activity, '
                        'first_active_time, last_active_time, last_time_moved, calling_package, '
                        'user_id, action, component, snap, recimg, fullat1, fullat2) '
                        'VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
                        datainsert)
                    db.commit()

    report = ArtifactHtmlReport('Recent Tasks, Snapshots & Images')
    report.start_artifact_report(report_folder, f'Recent Activity_{uid}')
    report.add_script()
    data_headers = ('Key', 'Value')
    image_data_headers = ('Snapshot_Image', 'Recent_Image')

    # Query to create report
    db = sqlite3.connect(os.path.join(report_folder, 'RecentAct_{}.db'.format(uid)))
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        task_id as Task_ID,
        effective_uid as Effective_UID,
        affinity as Affinity,
        real_activity as Real_Activity,
        datetime(first_active_time/1000, 'UNIXEPOCH') as First_Active_Time,
        datetime(last_active_time/1000, 'UNIXEPOCH') as Last_Active_Time,
        datetime(last_time_moved/1000, 'UNIXEPOCH') as Last_Time_Moved,
        calling_package as Calling_Package,
        user_id as User_ID,
        action as Action,
        component as Component,
        snap as Snapshot_Image,
        recimg as Recent_Image
    FROM recent
    ''')

    all_rows = cursor.fetchall()
    colnames = cursor.description

    for row in all_rows:
        if row[2] is None:
            row2 = ''  # 'NO DATA'
        else:
            row2 = row[2]

        report.write_minor_header(f'Application: {row2}')

        # Build the key/value rows from the column headers
        data_list = []
        for x in range(0, 13):
            if row[x] is None:
                pass
            else:
                data_list.append((colnames[x][0], str(row[x])))

        report.write_artifact_data_table(data_headers, data_list, folder,
                                         table_id='',
                                         write_total=False,
                                         write_location=False,
                                         cols_repeated_at_bottom=False)

        image_data_row = []
        image_data_list = [image_data_row]

        if row[11] == 'NO IMAGE':
            image_data_row.append('No Image')
        else:
            image_data_row.append(
                '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid z-depth-2 zoom" style="max-height: 400px" alt="{0}"></a>'
                .format(str(row[11]), folder_name))
        if row[12] == 'NO IMAGE':
            image_data_row.append('No Image')
        else:
            image_data_row.append(
                '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid z-depth-2 zoom" style="max-height: 400px" alt="{0}"></a>'
                .format(str(row[12]), folder_name))

        report.write_artifact_data_table(image_data_headers, image_data_list, folder,
                                         table_id='',
                                         table_style="width: auto",
                                         write_total=False,
                                         write_location=False,
                                         html_escape=False,
                                         cols_repeated_at_bottom=False)
        report.write_raw_html('<br />')

    report.end_artifact_report()
def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text):
    recents = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.jpg'):
            continue  # Skip jpg files, all others should be protobuf
        elif file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
        elif os.path.isdir(file_found):
            continue  # Skip folders

        with open(file_found, 'rb') as f:
            pb = f.read()

        types = {
            '1': {
                'type': 'message',
                'message_typedef': {
                    '1': {'type': 'uint', 'name': 'id'},
                    '4': {'type': 'uint', 'name': 'timestamp1'},
                    '5': {'type': 'str', 'name': 'search-query'},
                    '7': {
                        'type': 'message',
                        'message_typedef': {
                            '1': {'type': 'str', 'name': 'url'},
                            '2': {'type': 'str', 'name': 'url-domain'},
                            '3': {'type': 'str', 'name': 'title'}
                        },
                        'name': 'page'
                    },
                    '8': {
                        'type': 'message',
                        'message_typedef': {
                            '1': {'type': 'str', 'name': 'category'},
                            '2': {'type': 'str', 'name': 'engine'}
                        },
                        'name': 'search'
                    },
                    '9': {'type': 'int', 'name': 'screenshot-id'},
                    '17': {'type': 'uint', 'name': 'timestamp2'},
                },
                'name': ''
            }
        }
        values, types = blackboxprotobuf.decode_message(pb, types)
        items = values.get('1', None)
        if items:
            if isinstance(items, dict):
                # Only one element was found: no array, just a dict of that single element
                recents.append((file_found, [items]))
            else:
                # Array of dicts found
                recents.append((file_found, items))

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)

    recent_entries = len(recents)
    if recent_entries > 0:
        description = "Recently searched terms from the Google Search widget and webpages read from the Google app (previously known as 'Google Now') appear here."
        report = ArtifactHtmlReport('Google Now & Quick Search recent events')
        report.start_artifact_report(report_folder, 'Recent Searches & Google Now', description)
        report.add_script()
        data_headers = ('Screenshot', 'Protobuf Data')
        data_list = []

        for file_path, items in recents:
            dir_path, base_name = os.path.split(file_path)
            for item in items:
                screenshot_id = str(item.get('screenshot-id', ''))
                screenshot_file_path = os.path.join(dir_path, f'{base_name}-{screenshot_id}.jpg')
                if os.path.exists(screenshot_file_path):
                    shutil.copy2(screenshot_file_path, report_folder)
                img_html = '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid" style="max-height:600px; min-width:300px" title="{0}"></a>'.format(
                    f'{base_name}-{screenshot_id}.jpg', folder_name)
                platform = is_platform_windows()
                if platform:
                    img_html = img_html.replace('?', '')
                recursive_convert_bytes_to_str(item)  # convert all 'bytes' to str (a sketch of this helper follows the function)
                data_list.append((
                    img_html,
                    '<pre id="json" style="font-size: 110%">' + escape(json.dumps(item, indent=4)).replace('\\n', '<br>') + '</pre>'))

        report.write_artifact_data_table(data_headers, data_list, dir_path, html_escape=False)
        report.end_artifact_report()

        tsvname = f'google quick search box recent'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No recent quick search or now data available')
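# recursive_convert_bytes_to_str() is called above but not included in this excerpt.
# A minimal sketch of what such a helper plausibly does (an assumption, not the
# project's actual implementation): walk the decoded protobuf structure and turn
# bytes values into strings so json.dumps() can serialize them.
def recursive_convert_bytes_to_str_sketch(obj):
    '''Hypothetical helper: recursively converts bytes values inside dicts/lists to str.'''
    if isinstance(obj, bytes):
        return obj.decode('utf-8', errors='backslashreplace')
    if isinstance(obj, dict):
        for key, value in obj.items():
            obj[key] = recursive_convert_bytes_to_str_sketch(value)
    elif isinstance(obj, list):
        for index, value in enumerate(obj):
            obj[index] = recursive_convert_bytes_to_str_sketch(value)
    return obj

# Example: recursive_convert_bytes_to_str_sketch({'search-query': b'weather'})
# returns {'search-query': 'weather'}.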
def get_permissions(files_found, report_folder, seeker, wrap_text):
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)
        data_list_permission_trees = []
        data_list_permissions = []
        data_list_packages_su = []
        err = 0
        user = ''

        parts = file_found.split(slash)
        if 'mirror' in parts:
            user = 'mirror'

        if user == 'mirror':
            # Skip the Magisk mirror copy; it duplicates the real data
            continue
        else:
            try:
                # ABX (Android Binary XML) files are routed to abxread(); plain XML goes
                # through ElementTree. A sketch of ABX detection follows this function.
                if checkabx(file_found):
                    multi_root = False
                    tree = abxread(file_found, multi_root)
                else:
                    tree = ET.parse(file_found)
            except ET.ParseError:
                logfunc('Parse error - Non XML file.')
                err = 1

            if err == 0:
                root = tree.getroot()

                for elem in root:
                    if elem.tag == 'permission-trees':
                        for subelem in elem:
                            data_list_permission_trees.append(
                                (subelem.attrib.get('name', ''),
                                 subelem.attrib.get('package', '')))
                    elif elem.tag == 'permissions':
                        for subelem in elem:
                            data_list_permissions.append(
                                (subelem.attrib.get('name', ''),
                                 subelem.attrib.get('package', ''),
                                 subelem.attrib.get('protection', '')))
                    else:
                        for subelem in elem:
                            if subelem.tag == 'perms':
                                for sub_subelem in subelem:
                                    data_list_packages_su.append(
                                        (elem.tag,
                                         elem.attrib.get('name', ''),
                                         sub_subelem.attrib.get('name', ''),
                                         sub_subelem.attrib.get('granted', '')))

        if len(data_list_permission_trees) > 0:
            report = ArtifactHtmlReport('Permission Trees')
            report.start_artifact_report(report_folder, f'Permission Trees')
            report.add_script()
            data_headers = ('Name', 'Package')
            report.write_artifact_data_table(data_headers, data_list_permission_trees, file_found)
            report.end_artifact_report()

            tsvname = f'Permission Trees'
            tsv(report_folder, data_headers, data_list_permission_trees, tsvname)

        if len(data_list_permissions) > 0:
            report = ArtifactHtmlReport('Permissions')
            report.start_artifact_report(report_folder, f'Permissions')
            report.add_script()
            data_headers = ('Name', 'Package', 'Protection')
            report.write_artifact_data_table(data_headers, data_list_permissions, file_found)
            report.end_artifact_report()

            tsvname = f'Permissions'
            tsv(report_folder, data_headers, data_list_permissions, tsvname)

        if len(data_list_packages_su) > 0:
            report = ArtifactHtmlReport('Package and Shared User')
            report.start_artifact_report(report_folder, f'Package and Shared User')
            report.add_script()
            data_headers = ('Type', 'Package', 'Permission', 'Granted?')
            report.write_artifact_data_table(data_headers, data_list_packages_su, file_found)
            report.end_artifact_report()

            tsvname = f'Permissions - Packages and Shared User'
            tsv(report_folder, data_headers, data_list_packages_su, tsvname)
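# checkabx()/abxread() are shared helpers not included in this excerpt. A hedged
# sketch of the detection side, assuming the standard Android Binary XML magic
# bytes b'ABX\x00' (an assumption; the project's real helper may differ):
def checkabx_sketch(path):
    '''Hypothetical helper: True if the file starts with the ABX magic (Android Binary XML).'''
    with open(path, 'rb') as f:
        return f.read(4) == b'ABX\x00'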