def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    utmpx_path = glob.glob(
        os.path.join(self.options.inputdir, 'private/var/run/utmpx'))

    # String version of the on-disk struct format. References:
    # https://opensource.apple.com/source/Libc/Libc-1158.50.2/include/NetBSD/utmpx.h.auto.html
    # https://github.com/libyal/dtformats/blob/master/documentation/Utmp%20login%20records%20format.asciidoc
    # https://stackoverflow.com/questions/17244488/reading-struct-in-python-from-created-struct-in-c
    UTMPX_STR = "256s4s32sih2xii256s64x"
    UTMPX_STR_SIZE = calcsize(UTMPX_STR)
    UTMPX_BUFFER_SIZE = 628

    if len(utmpx_path) == 0:
        self.log.debug("File not found: {0}".format(utmpx_path))

    for path in utmpx_path:
        with open(path, 'rb') as file:
            # Build an empty record keyed on the module headers.
            record = OrderedDict((h, '') for h in self._headers)

            # Read the header record, which is discarded for now.
            header = file.read(UTMPX_BUFFER_SIZE)

            # Loop through the remaining records; the first is always boot time.
            while True:
                buf = file.read(UTMPX_BUFFER_SIZE)
                if len(buf) != UTMPX_STR_SIZE:
                    break

                # Unpack the fields of one record.
                user, id, terminal_type, pid, logon_code, epoch, usec, host_id = unpack_from(
                    UTMPX_STR, buf)

                # Combine the seconds and microseconds timestamp fields.
                combo_time = datetime.datetime.utcfromtimestamp(
                    epoch) + datetime.timedelta(microseconds=usec)
                utc_combo = pytz.utc.localize(combo_time)
                timestamp_formatted = utc_combo.strftime(
                    '%Y-%m-%dT%H:%M:%S.%fZ')

                if host_id.rstrip('\x00') == '':
                    host = "localhost"
                else:
                    host = host_id.rstrip('\x00')

                record['user'] = user.rstrip('\x00')
                record['id'] = id
                record['terminal_type'] = terminal_type.rstrip('\x00')
                record['pid'] = pid
                record['logon_type'] = self._decode_logon(logon_code)
                record['timestamp'] = timestamp_formatted
                record['hostname'] = host

                output.write_entry(record.values())
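# A minimal standalone sketch of the fixed-size record parsing above, showing
# how the struct string maps onto one 628-byte utmpx entry. The field layout
# is taken from UTMPX_STR itself; the path is the live-system default and
# typically requires elevated privileges to read. (Python 2, matching this
# codebase.)
from struct import calcsize, unpack_from

UTMPX_STR = "256s4s32sih2xii256s64x"
RECORD_SIZE = calcsize(UTMPX_STR)  # 628 bytes, matching UTMPX_BUFFER_SIZE

with open('/private/var/run/utmpx', 'rb') as f:
    f.read(RECORD_SIZE)        # skip the file header record
    buf = f.read(RECORD_SIZE)  # first real record (boot time)
    if len(buf) == RECORD_SIZE:
        fields = unpack_from(UTMPX_STR, buf)
        print(fields[0].rstrip('\x00'))  # the user field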
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    user_inputdir = glob.glob(
        os.path.join(self.options.inputdir, "Users/*"))
    user_inputdir.append(os.path.join(self.options.inputdir, "var/root"))

    record = OrderedDict((h, '') for h in self._headers)

    for user_home in user_inputdir:
        record['user'] = os.path.basename(user_home)

        # Gather known_hosts and authorized_keys files for the user.
        kh_path = os.path.join(user_home, '.ssh/known_hosts')
        u_knownhosts = glob.glob(kh_path)
        ak_path = os.path.join(user_home, '.ssh/authorized_keys')
        u_authorizedkeys = glob.glob(ak_path)

        # Combine all files found into one list per user.
        u_ssh_all = u_knownhosts + u_authorizedkeys

        # Generate debug messages for files not found.
        if len(u_knownhosts) == 0:
            self.log.debug("File not found: {0}".format(kh_path))
        if len(u_authorizedkeys) == 0:
            self.log.debug("File not found: {0}".format(ak_path))

        # Iterate over the files found and parse them using ssh-keygen.
        for item in u_ssh_all:
            p, e = subprocess.Popen(["ssh-keygen", "-l", "-f", item],
                                    stderr=subprocess.STDOUT,
                                    stdout=subprocess.PIPE).communicate()
            record['src_name'] = os.path.basename(item)

            if not e and "is not a public key file" not in p:
                p = [x for x in p.split('\n') if len(x) > 0]
                for i in p:
                    data = i.split(' ')
                    record['bits'] = data[0]
                    record['fingerprint'] = data[1]
                    record['host'] = data[2]
                    record['keytype'] = data[3]
                    output.write_entry(record.values())
            elif "is not a public key file" in p:
                self.log.debug("Could not parse {0}: {1}".format(item, p))
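# For reference, `ssh-keygen -l -f <file>` prints one line per key, which the
# parser above splits on single spaces into bits, fingerprint, host/comment,
# and key type. A representative line (values are illustrative only):
#
#   2048 SHA256:AbCdEf0123456789abcdef... example.com (RSA)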
def run(self): if ("Volumes" not in self.options.inputdir and self.options.forensic_mode is False): output = DataWriter(self.module_name(), self._headers, self.log, self.run_id, self.options) netstat_out, e = subprocess.Popen( ["netstat", "-f", "inet", "-n"], stdout=subprocess.PIPE).communicate() if e: pass else: netstat = netstat_out.encode('utf-8').split('\n') for l in netstat: if not (l.startswith("Active") or l.startswith("Proto") or len(l) == 0): item = [ x.lstrip(' ') for x in filter(None, l.split(' ')) ] protocol = item[0] recv_q = item[1] send_q = item[2] try: src_ip = '.'.join(item[3].split('.')[0:4]) except: src_ip = "ERROR" try: src_port = item[3].split('.')[-1] except: src_port = "ERROR" try: dst_ip = '.'.join(item[4].split('.')[0:4]) except: dst_ip = "ERROR" try: dst_port = item[4].split('.')[-1] except: dst_port = "ERROR" if len(item) == 6: state = item[5] else: state = "" line = [ protocol, recv_q, send_q, src_ip, src_port, dst_ip, dst_port, state ] output.write_entry(line) else: self.log.error("Module did not run: input is not a live system!")
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    user_inputdir = glob.glob(
        os.path.join(self.options.inputdir, "Users/*"))
    user_inputdir.append(os.path.join(self.options.inputdir, "var/root"))

    for user_home in user_inputdir:
        # Get the username from the home directory path.
        user = os.path.basename(user_home)

        # Get all _history files in the root of the user directory.
        bash_loc = os.path.join(user_home, '.*_history')
        u_bash = glob.glob(bash_loc)
        if len(u_bash) == 0:
            self.log.debug("Files not found in: {0}".format(bash_loc))

        # Get all files in the user's .bash_sessions directory.
        bash_sess_loc = os.path.join(user_home, '.bash_sessions/*')
        u_bash_sess = glob.glob(bash_sess_loc)
        if len(u_bash_sess) == 0:
            self.log.debug("Files not found in: {0}".format(bash_sess_loc))

        # Combine all files into one list and parse them iteratively.
        if len(u_bash) != 0 or len(u_bash_sess) != 0:
            u_bash_all = u_bash + u_bash_sess

            for sess in u_bash_all:
                out = stats2(sess)
                sess = open(sess, 'r').readlines()
                indexer = 0
                for line in sess:
                    record = OrderedDict((h, '') for h in self._headers)
                    for i in self._headers:
                        if i in out:
                            record[i] = out[i]
                    record['src_file'] = out['name']
                    record['user'] = user
                    record['cmd'] = line.rstrip()
                    indexer += 1
                    record['item_index'] = indexer
                    output.write_entry(record.values())
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    qevents_loc = os.path.join(
        self.options.inputdir,
        'Users/*/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2')
    qevents_list = glob.glob(qevents_loc)
    qry = 'SELECT * FROM LSQuarantineEvent'

    if len(qevents_list) == 0:
        self.log.debug("Files not found in: {0}".format(qevents_loc))

    for i in qevents_list:
        data = query_db(i, qry, self.options.outputdir)

        userpath = i.split('/')
        userindex = userpath.index('Users') + 1
        user = userpath[userindex]

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in self._headers)
            record['user'] = user
            record['timestamp'] = cocoa_time(item[1])
            record['bundle_id'] = item[2]
            record['quarantine_agent'] = item[3]
            record['download_url'] = item[4]
            record['sender_name'] = item[5]
            record['sender_address'] = item[6]
            record['typeno'] = str(item[7])
            record['origin_title'] = item[8]
            record['origin_url'] = item[9]
            record['origin_alias'] = item[10]

            line = [x.encode('utf-8') if isinstance(x, unicode) else x
                    for x in record.values()]
            output.write_entry(line)
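# cocoa_time above comes from this repo's shared utilities. A minimal
# equivalent sketch, assuming the stored value is seconds since the Mac
# absolute epoch of 2001-01-01 00:00:00 UTC (how LSQuarantineEvent timestamps
# are commonly interpreted):
import datetime

def cocoa_time_sketch(seconds):
    # Convert a Cocoa/Mac absolute timestamp to an ISO8601 string.
    epoch = datetime.datetime(2001, 1, 1)
    return (epoch + datetime.timedelta(seconds=seconds)).strftime('%Y-%m-%dT%H:%M:%SZ')

print(cocoa_time_sketch(600000000))  # 2020-01-06T10:40:00Z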
def run(self): if ("Volumes" not in self.options.inputdir and self.options.forensic_mode is False): output = DataWriter(self.module_name(), self._headers, self.log, self.run_id, self.options) #encoding = locale.getpreferredencoding(True) names = OrderedDict( zip( 'cpRLftDsian', 'command pid ppid user fd type device_no size inode access name' .split())) lsof = Popen(["lsof", "-n", "-P", "-F{}0".format(''.join(names))], stdout=PIPE, bufsize=-1) for line in lsof.stdout: fields = { f[:1].decode('ascii', 'strict'): f[1:] for f in line.split(b'\0') if f.rstrip(b'\n') } if 'p' in fields: process_info = fields elif 'f' in fields: fields.update(process_info) result = OrderedDict( (name, fields.get(id)) for id, name in names.items()) line = [v for k, v in result.items()] output.write_entry(line) lsof.communicate() else: self.log.error("Module did not run: input is not a live system!")
def run(self): if ("Volumes" not in self.options.inputdir and self.options.forensic_mode is False): output = DataWriter(self.module_name(), self._headers, self.log, self.run_id, self.options) os.environ['TZ'] = 'UTC0' ps_out, e = subprocess.Popen( ["ps", "-Ao", "pid,ppid,user,stat,lstart,time,command"], stdout=subprocess.PIPE).communicate() if e: pass else: pslist = ps_out.decode('utf-8').split('\n') for l in pslist: if "PID" not in l and len(l) > 0: item = [ x.lstrip(' ') for x in filter(None, l.split(' ')) ] pid = item[0] ppid = item[1] user = item[2] state = item[3] proc_start = parser.parse(' '.join( item[5:9])).replace(tzinfo=None).isoformat() + 'Z' runtime = item[9] cmd = ' '.join(item[10:]).encode("utf-8") line = [ pid, ppid, user, state, proc_start, runtime, cmd ] output.write_entry(line) else: self.log.error("Module did not run: input is not a live system!")
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    user_inputdir = glob.glob(
        os.path.join(self.options.inputdir, "Users/*"))
    user_inputdir.append(os.path.join(self.options.inputdir, "var/root"))

    spotlight_path = 'Library/Application Support/com.apple.spotlight.Shortcuts'
    for user_home in user_inputdir:
        sl_path = os.path.join(user_home, spotlight_path)
        u_spotlight = glob.glob(sl_path)

        if len(u_spotlight) == 0:
            self.log.debug("File not found: {0}".format(sl_path))

        for item in u_spotlight:
            try:
                spotlight_data = plistlib.readPlist(item)
                for k, v in spotlight_data.items():
                    user = os.path.basename(user_home)
                    shortcut = k
                    display_name = spotlight_data[k]['DISPLAY_NAME']
                    last_used = spotlight_data[k]['LAST_USED'].isoformat() + "Z"
                    url = spotlight_data[k]['URL']

                    line_raw = [user, shortcut, display_name, last_used, url]
                    line = [x.encode('utf-8') for x in line_raw]
                    output.write_entry(line)
            except Exception:
                self.log.error("Could not parse: {0}".format(item))
class MRUModule(AutoMacTCModule):
    _mod_filename = __name__

    _headers = [
        'src_file', 'src_name', 'item_index', 'order', 'name', 'url',
        'source_key'
    ]

    def __init__(self, *args, **kwargs):
        super(MRUModule, self).__init__(*args, **kwargs)
        self._output = DataWriter(self.module_name(), self._headers,
                                  self.log, self.run_id, self.options)

    def _parse_sfls(self):
        sfl_list = multiglob(self.options.inputdir, [
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*.sfl',
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*/*.sfl'
        ])

        for mru_file in sfl_list:
            plist_objects = ccl_bplist.deserialise_NsKeyedArchiver(
                ccl_bplist.load(open(mru_file, "rb")),
                parse_whole_structure=True)

            try:
                if plist_objects["root"]["NS.objects"][1]["NS.keys"][0] == "com.apple.LSSharedFileList.MaxAmount":
                    numberOfItems = plist_objects["root"]["NS.objects"][1]["NS.objects"][0]
            except Exception:
                pass

            try:
                if plist_objects["root"]["NS.keys"][2] == "items":
                    items = plist_objects["root"]["NS.objects"][2]["NS.objects"]
            except Exception:
                self.log.debug('Could not parse SFL {0}: {1}'.format(
                    mru_file, [traceback.format_exc()]))
                items = None

            if items:
                for n, item in enumerate(items):
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = mru_file
                    record['src_name'] = "SharedFileList"

                    try:
                        try:
                            name = item["name"].encode('utf-8')
                        except Exception:
                            name = ''
                        record['name'] = name
                        record['item_index'] = str(n)
                        record['order'] = item['order']
                        record['url'] = item['URL']['NS.relative']
                    except Exception:
                        self.log.debug(
                            "Could not parse SFL item: {0}".format(item))

                    self._output.write_entry(record.values())

    def _parse_sfl2s(self):
        sfl2_list = multiglob(self.options.inputdir, [
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*.sfl2',
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*/*.sfl2'
        ])

        for mru_file in sfl2_list:
            plist_objects = ccl_bplist.deserialise_NsKeyedArchiver(
                ccl_bplist.load(open(mru_file, "rb")),
                parse_whole_structure=True)

            try:
                if plist_objects["root"]["NS.objects"][1]["NS.keys"][0] == "com.apple.LSSharedFileList.MaxAmount":
                    numberOfItems = plist_objects["root"]["NS.objects"][1]["NS.objects"][0]
            except Exception:
                pass

            try:
                if plist_objects["root"]["NS.keys"][0] == "items":
                    items = plist_objects["root"]["NS.objects"][0]["NS.objects"]
            except Exception:
                self.log.debug('Could not parse SFL {0}: {1}'.format(
                    mru_file, [traceback.format_exc()]))
                items = None

            if items:
                for n, item in enumerate(items):
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = mru_file
                    record['src_name'] = "SharedFileList"

                    try:
                        attribute_keys = plist_objects["root"]["NS.objects"][0]["NS.objects"][n]["NS.keys"]
                        attribute_values = plist_objects["root"]["NS.objects"][0]["NS.objects"][n]["NS.objects"]
                        attributes = dict(zip(attribute_keys, attribute_values))

                        try:
                            name = str(attributes['Name']).encode('utf-8')
                        except Exception:
                            name = ''

                        # The Bookmark blob is parsed with three fallbacks:
                        # raw split, NS.data split, then a last-resort scan.
                        if 'Bookmark' in attributes:
                            try:
                                url = ['file://' + x.split(';')[-1]
                                       for x in attributes['Bookmark'].split('\x00')
                                       if x != '' and ';' in x][0]
                            except Exception:
                                try:
                                    url = ', '.join(
                                        ['file://' + x.split(';')[-1]
                                         for x in attributes['Bookmark']['NS.data'].split('\x00')
                                         if x != '' and ';' in x])
                                except Exception:
                                    try:
                                        url = [x for x in attributes['Bookmark'].split('\x00')
                                               if x != '' and x.startswith('x')][0]
                                    except Exception:
                                        url = 'ERROR-COULDNOTPARSE'
                        else:
                            url = 'ERROR-NODATA'

                        record['item_index'] = str(n)
                        record['name'] = name
                        record['url'] = url
                    except Exception:
                        self.log.debug(
                            "Could not parse SFL item: {0}".format(item))

                    self._output.write_entry(record.values())

    def _parse_securebookmarks(self):
        secure_bookmarks = multiglob(self.options.inputdir, [
            'Users/*/Library/Containers/*/Data/Library/Preferences/*.securebookmarks.plist'
        ])

        for secure_bookmark_file in secure_bookmarks:
            try:
                data = plistlib.readPlist(secure_bookmark_file)
            except Exception:
                self.log.debug(
                    'Could not parse securebookmark file {0}: {1}'.format(
                        secure_bookmark_file, [traceback.format_exc()]))
                data = None

            if data:
                for k, v in data.items():
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = secure_bookmark_file
                    record['src_name'] = "SecureBookmarks"
                    try:
                        record['url'] = k
                        record['name'] = k.split('/')[-1].encode('utf-8')
                    except Exception:
                        self.log.debug(
                            "Could not parse securebookmark item for key: {0}".format(k))
                    self._output.write_entry(record.values())

    def _parse_sidebarplists(self):
        sidebar_plists = multiglob(
            self.options.inputdir,
            ['Users/*/Library/Preferences/com.apple.sidebarlists.plist'])

        for sblist in sidebar_plists:
            try:
                data = read_bplist(sblist)[0]
            except Exception:
                self.log.debug('Could not parse sidebarplist {0}: {1}'.format(
                    sblist, [traceback.format_exc()]))
                data = None

            if data:
                for i in data['systemitems']['VolumesList']:
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = sblist
                    record['src_name'] = "SidebarPlist"
                    try:
                        record['name'] = i['Name'].encode('utf-8')
                        if 'Bookmark' in i:
                            record['url'] = 'file:///' + str(
                                i['Bookmark']).split('file:///')[1].split('\x00')[0]
                        record['source_key'] = 'VolumesList'
                    except Exception:
                        self.log.debug(
                            "Could not parse sidebarplist item: {0}".format(i))
                    self._output.write_entry(record.values())

    def _parse_finderplists(self):
        finder_plists = multiglob(
            self.options.inputdir,
            ['Users/*/Library/Preferences/com.apple.finder.plist'])

        for fplist in finder_plists:
            try:
                data = read_bplist(fplist)[0]
            except Exception:
                self.log.debug('Could not parse finderplist {0}: {1}'.format(
                    fplist, [traceback.format_exc()]))
                data = None

            if data:
                try:
                    recentfolders = data['FXRecentFolders']
                except KeyError:
                    self.log.debug("Could not find FXRecentFolders key in plist.")
                    recentfolders = []

                try:
                    moveandcopy = data['RecentMoveAndCopyDestinations']
                except KeyError:
                    self.log.debug("Could not find RecentMoveAndCopyDestinations key in plist.")
                    moveandcopy = []

                for i in recentfolders:
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = fplist
                    record['src_name'] = "FinderPlist"
                    try:
                        record['source_key'] = 'FXRecentFolders'
                        record['name'] = i['name'].encode('utf-8')
                        bkmk = i['file-bookmark']
                        record['url'] = 'file:///' + str(bkmk).split(';')[-1].split('\x00')[0]
                    except Exception:
                        self.log.debug(
                            "Could not parse finderplist item: {0}".format(i))
                    self._output.write_entry(record.values())

                for i in moveandcopy:
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = fplist
                    record['src_name'] = "FinderPlist"
                    try:
                        record['url'] = i
                        record['name'] = i.split('/')[-2].encode('utf-8')
                        record['source_key'] = 'RecentMoveAndCopyDestinations'
                    except Exception:
                        self.log.debug(
                            "Could not parse finderplist item: {0}: {1}".format(
                                i, [traceback.format_exc()]))
                    self._output.write_entry(record.values())

    def run(self):
        self._parse_sfls()
        self._parse_sfl2s()
        self._parse_securebookmarks()
        self._parse_sidebarplists()
        self._parse_finderplists()
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    q_loc = os.path.join(
        self.options.inputdir,
        'private/var/folders/*/*/C/com.apple.QuickLook.thumbnailcache/index.sqlite')
    qlist = glob.glob(q_loc)

    if self.options.os_version is not None:
        ver = float('.'.join(self.options.os_version.split('.')[1:]))
        if ver > 14.0 and self.options.forensic_mode is not True:
            self.log.error("Artifacts are inaccessible on and above OS version 10.14 on live systems.")
            return
    else:
        if self.options.forensic_mode is not True:
            self.log.debug("OSVersion not detected, but going to try to parse anyway.")
        else:
            self.log.error("OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems.")
            return

    if len(qlist) == 0:
        self.log.debug("Files not found in: {0}".format(q_loc))

    ql_sql = 'SELECT distinct k.folder, k.file_name, t.hit_count, t.last_hit_date, k.version \
              FROM (SELECT rowid AS f_rowid,folder,file_name,version FROM files) k \
              LEFT JOIN thumbnails t ON t.file_id = k.f_rowid ORDER BY t.hit_count DESC'

    for qfile in qlist:
        uid = stats2(qfile)['uid']
        data = query_db(qfile, ql_sql, self.options.outputdir)

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in self._headers)
            record['uid'] = uid
            record['path'] = item[0].encode('utf-8')
            record['name'] = item[1].encode('utf-8')

            if item[3]:
                record['last_hit_date'] = cocoa_time(item[3])
            else:
                record['last_hit_date'] = ''

            if item[2]:
                record['hit_count'] = item[2]
            else:
                record['hit_count'] = ''

            try:
                plist_array = read_stream_bplist(item[4])
                record['file_last_modified'] = cocoa_time(plist_array['date'])
                record['generator'] = plist_array['gen']
                try:
                    record['file_size'] = int(plist_array['size'])
                except KeyError:
                    record['file_size'] = 'Error'
            except Exception:
                self.log.error("Could not parse: embedded binary plist for record {0}".format(record['name']))

            output.write_entry(record.values())
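# The version gate above reduces "10.x.y" to a float for comparison; a quick
# check of that conversion:
os_version = '10.14.2'
ver = float('.'.join(os_version.split('.')[1:]))
print(ver)         # 14.2
print(ver > 14.0)  # True: this artifact is inaccessible live on 10.14+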
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    # Parse com.apple.preferences.accounts.plist to identify deleted accounts.
    _deletedusers_plist = os.path.join(
        self.options.inputdir,
        'Library/Preferences/com.apple.preferences.accounts.plist')
    if not os.path.exists(_deletedusers_plist):
        self.log.debug("File not found: {0}".format(_deletedusers_plist))
        _deletedusers = []
    else:
        try:
            _deletedusers = read_bplist(_deletedusers_plist)[0]['deletedUsers']
        except Exception:
            self.log.debug("Could not parse: {0}".format(_deletedusers_plist))
            _deletedusers = []

    for i in range(len(_deletedusers)):
        record = OrderedDict((h, '') for h in self._headers)
        record['date_deleted'] = parser.parse(
            str(_deletedusers[i]['date'])).strftime('%Y-%m-%dT%H:%M:%SZ')
        record['uniq_id'] = _deletedusers[i]['dsAttrTypeStandard:UniqueID']
        record['user'] = _deletedusers[i]['name']
        record['real_name'] = _deletedusers[i]['dsAttrTypeStandard:RealName']
        output.write_entry(record.values())

    # Enumerate users still active on disk.
    _liveusers_plists = os.path.join(
        self.options.inputdir, 'private/var/db/dslocal/nodes/Default/users/')
    try:
        _liveplists = [
            i for i in os.listdir(_liveusers_plists)
            if not i.startswith("_") and i not in ['daemon.plist', 'nobody.plist']
        ]
    except OSError:
        self.log.debug("Could not connect [{0}].".format([traceback.format_exc()]))
        _liveplists = []

    _liveusers = glob.glob(os.path.join(self.options.inputdir, 'Users/*'))
    _liveusers.append(os.path.join(self.options.inputdir, "var/root"))

    _admins = os.path.join(
        self.options.inputdir,
        'private/var/db/dslocal/nodes/Default/groups/admin.plist')
    if not os.path.exists(_admins):
        self.log.debug("File not found: {0}".format(_admins))
        self.log.error("Could not determine admin users.")
        admins = []
    else:
        try:
            admins = list(read_bplist(_admins)[0]['users'])
        except Exception:
            try:
                # Fall back to: dscl . -read /Groups/admin GroupMembership
                admin_users, e = subprocess.Popen(
                    ["dscl", ".", "-read", "/Groups/admin", "GroupMembership"],
                    stdout=subprocess.PIPE).communicate()
                admins = admin_users.split()[1:]
            except Exception:
                admins = []
                self.log.debug("Could not parse: {0}".format(_admins))
                self.log.error("Could not determine admin users.")

    _loginwindow = os.path.join(
        self.options.inputdir, 'Library/Preferences/com.apple.loginwindow.plist')
    if not os.path.exists(_loginwindow):
        self.log.debug("File not found: {0}".format(_loginwindow))
        lastuser = ""
    else:
        try:
            lastuser = read_bplist(_loginwindow)[0]['lastUserName']
        except Exception:
            lastuser = ""
            self.log.debug("Could not parse: {0}".format(_loginwindow))
            self.log.error("Could not determine last logged in user.")

    for user_path in _liveusers:
        user_home = os.path.basename(user_path)
        if user_home not in ['.localized', 'Shared']:
            record = OrderedDict((h, '') for h in self._headers)
            oMACB = stats2(user_path, oMACB=True)
            record.update(oMACB)
            if user_home in admins:
                record['admin'] = 'Yes'
            if user_home == lastuser:
                record['lastloggedin_user'] = '******'

            record['user'] = user_home

            if len(_liveplists) > 0:
                i_plist = user_home + '.plist'
                if i_plist in _liveplists:
                    i_plist_array = read_bplist(
                        os.path.join(_liveusers_plists, i_plist))[0]
                    record['uniq_id'] = i_plist_array['uid'][0]
                    record['real_name'] = i_plist_array['realname'][0]
            elif ('Volumes' not in self.options.inputdir and
                    self.options.forensic_mode is not True):
                user_ids, e = subprocess.Popen(
                    ["dscl", ".", "-list", "Users", "UniqueID"],
                    stdout=subprocess.PIPE).communicate()
                for i in user_ids.split('\n'):
                    data = i.split(' ')
                    if record['user'] == data[0]:
                        record['uniq_id'] = data[-1]

                real_name, e = subprocess.Popen(
                    ["finger", record['user']],
                    stdout=subprocess.PIPE,
                    stderr=open(os.devnull, 'w')).communicate()
                names = [i for i in real_name.split('\n') if i.startswith("Login: ")]
                for i in names:
                    if ' ' + record['user'] in i:
                        r_name = [i for i in i.split('\t')
                                  if i.startswith('Name: ')][0].split()[1:]
                        record['real_name'] = ' '.join(r_name)

            output.write_entry(record.values())
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    ver = None
    if self.options.os_version is not None:
        ver = float('.'.join(self.options.os_version.split('.')[1:]))
        if ver < 13:
            self.log.error("Artifacts are not present below OS version 10.13.")
            return
    else:
        self.log.debug("OSVersion not detected, but going to try to parse anyway.")

    analytics_location = multiglob(self.options.inputdir, [
        'Library/Logs/DiagnosticReports/Analytics*.core_analytics',
        'Library/Logs/DiagnosticReports/Retired/Analytics*.core_analytics'
    ])

    if len(analytics_location) < 1:
        self.log.debug("No .core_analytics files found.")
    else:
        self.log.debug("Found {0} .core_analytics files to parse.".format(
            len(analytics_location)))

    counter = 0
    for file in analytics_location:
        data = open(file, 'r').read()
        data_lines = [
            json.loads(i) for i in data.split('\n')
            if i.startswith("{\"message\":")
        ]

        try:
            diag_start = [
                json.loads(i) for i in data.split('\n')
                if i.startswith("{\"_marker\":") and "end-of-file" not in i
            ][0]['startTimestamp']
        except ValueError:
            diag_start = "ERROR"

        try:
            diag_end = [
                json.loads(i) for i in data.split('\n')
                if i.startswith("{\"timestamp\":")
            ][0]['timestamp']
            diag_end = str(parser.parse(diag_end).astimezone(pytz.utc))
            diag_end = diag_end.replace(' ', 'T').replace('+00:00', 'Z')
        except ValueError:
            diag_end = "ERROR"

        for i in data_lines:
            record = OrderedDict((h, '') for h in self._headers)
            record['src_report'] = file
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['name'] = i['name']
            record['uuid'] = i['uuid']

            # If any fields not currently recorded (based on the headers
            # above) appear, they will be added to overflow.
            record['overflow'] = {}
            for k, v in i['message'].items():
                if k in record.keys():
                    record[k] = i['message'][k]
                else:
                    record['overflow'].update({k: v})

            if len(record['overflow']) == 0:
                record['overflow'] = ''

            if record['uptime'] != '':
                record['uptime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['uptime']))
            if record['activeTime'] != '':
                record['activeTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['activeTime']))
            if record['powerTime'] != '':
                record['powerTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['powerTime']))
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(' ||| ')[1]

            output.write_entry(record.values())
            counter += 1

    # Parse aggregate files from their directory on disk.
    agg_location = glob.glob(os.path.join(
        self.options.inputdir,
        'private/var/db/analyticsd/aggregates/4d7c9e4a-8c8c-4971-bce3-09d38d078849'))

    if ver is not None and ver > 13.6:
        self.log.debug("Cannot currently parse aggregate file above OS version 10.13.6.")
        return

    if len(agg_location) < 1:
        self.log.debug("No aggregate files found.")
    else:
        self.log.debug("Found {0} aggregate files to parse.".format(len(agg_location)))

    for aggregate in agg_location:
        data = open(aggregate, 'r').read()
        obj_list = data.split('\n')

        if len(obj_list) > 1:
            obj = [i for i in obj_list if '[[[' in i][0]
        else:
            obj = obj_list[0]

        try:
            data_lines = json.loads(obj)
        except ValueError:
            try:
                data_lines = json.loads(json.dumps(list(ast.literal_eval(obj))))
            except Exception:
                data_lines = []
                self.log.debug("Could not parse aggregate file: {0}.".format(
                    [traceback.format_exc()]))
        except Exception:
            data_lines = []
            self.log.debug("Could not parse aggregate file: {0}.".format(
                [traceback.format_exc()]))

        diag_start = stats2(aggregate)['btime']
        diag_end = stats2(aggregate)['mtime']

        raw = [
            i for i in data_lines
            if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)
        ]
        for i in raw:
            record = OrderedDict((h, '') for h in self._headers)
            record['src_report'] = aggregate
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['uuid'] = os.path.basename(aggregate)
            record['processName'] = i[0][0]

            record['appDescription'] = i[0][1]
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(' ||| ')[1]

            record['foreground'] = i[0][2]
            record['uptime'] = i[1][0]
            record['uptime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][0]))
            record['activeTime'] = i[1][1]
            record['activeTime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][1]))
            record['launches'] = i[1][2]
            record['idleTimeouts'] = i[1][3]
            record['activations'] = i[1][4]
            record['activityPeriods'] = i[1][5]
            record['powerTime'] = i[1][6]
            record['powerTime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][6]))

            output.write_entry(record.values())
            counter += 1

    if counter > 0:
        self.log.debug("Done. Wrote {0} lines.".format(counter))
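# The *_parsed fields above format second counters as HH:MM:SS. Note that
# time.gmtime wraps at 24 hours, so uptimes longer than a day fold over:
import time
print(time.strftime("%H:%M:%S", time.gmtime(4523)))    # 01:15:23
print(time.strftime("%H:%M:%S", time.gmtime(100000)))  # 03:46:40 (day dropped)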
class FirefoxModule(AutoMacTCModule):
    _mod_filename = __name__

    _downloads_headers = [
        'user', 'profile', 'download_url', 'download_path', 'download_started',
        'download_finished', 'download_totalbytes'
    ]
    _urls_headers = [
        'user', 'profile', 'visit_time', 'title', 'url', 'visit_count',
        'last_visit_time', 'typed', 'description'
    ]

    def __init__(self, *args, **kwargs):
        super(FirefoxModule, self).__init__(*args, **kwargs)
        self.firefox_location = glob.glob(
            os.path.join(self.options.inputdir,
                         'Users/*/Library/Application Support/Firefox/Profiles/*.*'))
        self._downloads_output = DataWriter('browser_firefox_downloads',
                                            self._downloads_headers, self.log,
                                            self.run_id, self.options)
        self._urls_output = DataWriter('browser_firefox_history',
                                       self._urls_headers, self.log,
                                       self.run_id, self.options)

    def _get_column_headers(self, conn, table):
        col_headers = conn.cursor().execute('SELECT * from {0}'.format(table))
        names = list(map(lambda x: x[0], col_headers.description))
        return names

    def _get_firefox_version(self, firefox_file):
        verfile = os.path.join(firefox_file, 'compatibility.ini')
        config = ConfigParser()
        config.read(verfile)
        ver = config.get('Compatibility', 'lastversion')
        self.log.debug("Firefox Version {0} identified.".format(ver))
        return ver

    def _connect_to_db(self, db_location, main_table):
        try:
            self.log.debug("Trying to connect to {0} directly...".format(db_location))
            history_db = db_location
            conn = sqlite3.connect(history_db)
            test = self._get_column_headers(conn, main_table)
            self.log.debug("Successfully connected.")
        except sqlite3.OperationalError:
            error = [x for x in traceback.format_exc().split('\n')
                     if x.startswith("OperationalError")]
            self.log.debug("Could not connect [{0}].".format(error[0]))

            if "database is locked" in error[0]:
                tmpdb = os.path.basename(db_location) + '-tmp'
                self.log.debug("Trying to connect to db copied to temp location...")

                shutil.copyfile(history_db, os.path.join(self.options.outputdir, tmpdb))
                history_db = os.path.join(self.options.outputdir, tmpdb)

                try:
                    conn = sqlite3.connect(history_db)
                    test = self._get_column_headers(conn, main_table)
                    self.log.debug("Successfully connected.")
                except sqlite3.OperationalError:
                    error = [x for x in traceback.format_exc().split('\n')
                             if x.startswith("OperationalError")]
                    self.log.debug("Could not connect [{0}].".format(error[0]))

                    if "no such table" in error[0]:
                        self.log.error("Module fatal error: necessary table doesn't exist in database.")
                        history_db = None
            elif "no such table" in error[0]:
                self.log.error("Module fatal error: necessary table doesn't exist in database.")
                history_db = None
            else:
                self.log.error("Module fatal error: cannot parse database.")
                history_db = None

        return history_db

    def _pull_download_history(self, conn, user, profile):
        desired_columns = ['url', 'content', 'dateAdded']
        available_columns = (self._get_column_headers(conn, 'moz_annos') +
                             self._get_column_headers(conn, 'moz_places'))
        query_columns_list = [i for i in desired_columns if i in available_columns]
        query_columns = ', '.join(query_columns_list)

        unavailable = ','.join(list(set(desired_columns) - set(query_columns_list)))
        if len(unavailable) > 0:
            self.log.debug('The following desired columns are not available in the database: {0}'.format(unavailable))

        self.log.debug("Executing sqlite query for download history...")
        try:
            downloads_data = conn.cursor().execute(
                'SELECT url,group_concat(content),dateAdded FROM moz_annos \
                LEFT JOIN moz_places ON moz_places.id = moz_annos.place_id \
                GROUP BY place_id').fetchall()
            self.log.debug("Success. Found {0} lines of data.".format(len(downloads_data)))
        except sqlite3.OperationalError:
            error = [x for x in traceback.format_exc().split('\n')
                     if x.startswith("OperationalError")]
            self.log.error('Failed to run query. [{0}]'.format(error[0]))
            return

        self.log.debug("Parsing and writing downloads data...")
        for item in downloads_data:
            record = OrderedDict((h, '') for h in self._downloads_headers)
            record['user'] = user
            record['profile'] = profile
            record['download_url'] = item[0]
            record['download_path'] = item[1].split(',')[0]
            record['download_started'] = firefox_time(item[2]).split('.')[0] + 'Z'
            record['download_finished'] = firefox_time(
                int(item[1].split(',')[2].split(':')[1]) * 1000).split('.')[0] + 'Z'
            record['download_totalbytes'] = item[1].split(',')[3].split(':')[1].replace('}', '')

            self._downloads_output.write_entry(record.values())
        self.log.debug("Done.")

    def _pull_visit_history(self, conn, user, profile):
        desired_columns = ['visit_date', 'title', 'url', 'visit_count',
                           'typed', 'last_visit_date', 'description']
        available_columns = (self._get_column_headers(conn, 'moz_places') +
                             self._get_column_headers(conn, 'moz_historyvisits'))
        query_columns_list = [i for i in desired_columns if i in available_columns]
        query_columns = ', '.join(query_columns_list)

        unavailable = ','.join(list(set(desired_columns) - set(query_columns_list)))
        if len(unavailable) > 0:
            self.log.debug('The following desired columns are not available in the database: {0}'.format(unavailable))

        self.log.debug("Executing sqlite query for visit history...")
        try:
            urls_data = conn.cursor().execute(
                'SELECT {0} FROM moz_historyvisits left join moz_places \
                on moz_places.id = moz_historyvisits.place_id'.format(query_columns)).fetchall()
            self.log.debug("Success. Found {0} lines of data.".format(len(urls_data)))
        except sqlite3.OperationalError:
            error = [x for x in traceback.format_exc().split('\n')
                     if x.startswith("OperationalError")]
            self.log.error('Failed to run query. [{0}]'.format(error[0]))
            return

        self.log.debug("Parsing and writing visits data...")
        nondict = dict.fromkeys(desired_columns)
        for item in urls_data:
            record = OrderedDict((h, '') for h in self._urls_headers)
            item_dict = dict(zip(query_columns_list, item))
            nondict.update(item_dict)

            record['user'] = user
            record['profile'] = profile
            record['visit_time'] = firefox_time(nondict['visit_date']).split('.')[0] + 'Z'
            if nondict['title']:
                record['title'] = nondict['title'].encode('utf-8')
            record['url'] = nondict['url']
            record['visit_count'] = nondict['visit_count']
            record['typed'] = nondict['typed']
            record['last_visit_time'] = firefox_time(nondict['last_visit_date']).split('.')[0] + 'Z'
            if nondict['description']:
                record['description'] = nondict['description'].encode('utf-8')

            self._urls_output.write_entry(record.values())
        self.log.debug("Done.")

    def run(self):
        for c in self.firefox_location:
            userpath = c.split('/')
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
            user = userpath[userindex]

            profileindex = userpath.index('Profiles') + 1
            profile = userpath[profileindex]

            self.log.debug("Starting parsing for Firefox under {0} user.".format(user))

            self._get_firefox_version(c)

            # If the database cannot be accessed, or if the main table needed
            # for parsing (moz_places) is unavailable, fail gracefully.
            history_db = self._connect_to_db(os.path.join(c, 'places.sqlite'), 'moz_places')
            if history_db:
                conn = sqlite3.connect(history_db)
                self._pull_download_history(conn, user, profile)
                self._pull_visit_history(conn, user, profile)

        try:
            os.remove(os.path.join(self.options.outputdir, 'places.sqlite-tmp'))
            os.remove(os.path.join(self.options.outputdir, 'places.sqlite-tmp-shm'))
            os.remove(os.path.join(self.options.outputdir, 'places.sqlite-tmp-wal'))
        except OSError:
            pass
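# firefox_time above is this repo's timestamp helper. A minimal equivalent
# sketch, assuming places.sqlite stores visit_date/last_visit_date as PRTime,
# i.e. microseconds since the Unix epoch:
import datetime

def firefox_time_sketch(prtime_us):
    return datetime.datetime.utcfromtimestamp(prtime_us / 1000000.).isoformat()

print(firefox_time_sketch(1578306000000000))  # 2020-01-06T10:20:00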
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    # If there are specific directories to recurse, recurse them.
    if self.options.dir_include_dirs != ['']:
        root_list = []
        for i in self.options.dir_include_dirs:
            root_list.append(os.path.join(self.options.inputdir, i))
        root_list = list(itertools.chain.from_iterable(
            [glob.glob(i) for i in root_list]))
    # If there are no specific directories to recurse, recurse from the root
    # of the inputdir, and also write a stats record for the root itself.
    else:
        root_list = glob.glob(self.options.inputdir)
        record = OrderedDict((h, '') for h in self._headers)
        stat_data = stats2(self.options.inputdir)
        record.update(stat_data)
        output.write_entry(record.values())

    # By default (if no-defaults is NOT in the exclusion flag), exclude the
    # following directories.
    if 'no-defaults' not in self.options.dir_exclude_dirs:
        if not self.options.forensic_mode:
            default_exclude = [
                '.fseventsd', '.DocumentRevisions-V100', '.Spotlight-V100',
                'Users/*/Pictures',
                'Users/*/Library/Application Support/AddressBook',
                'Users/*/Calendar', 'Users/*/Library/Calendars',
                'Users/*/Library/Preferences/com.apple.AddressBook.plist'
            ]
        else:
            default_exclude = [
                '.fseventsd', '.DocumentRevisions-V100', '.Spotlight-V100'
            ]
    # If no-defaults IS in the exclusion flag, remove it and use only the
    # user-provided exclusion list.
    else:
        default_exclude = []
        self.options.dir_exclude_dirs.remove('no-defaults')

    # If there are specific directories to exclude, do not recurse them.
    if self.options.dir_exclude_dirs != ['']:
        exclude_list = [
            os.path.join(self.options.inputdir, i).strip("/")
            for i in default_exclude + self.options.dir_exclude_dirs
        ]
    # If no specific directories are excluded, use the default list from above.
    else:
        exclude_list = [
            os.path.join(self.options.inputdir, i).strip("/")
            for i in default_exclude
        ]

    # If NOT running with the -f flag for forensic mode, exclude everything
    # under /Volumes/* to prevent recursion into mounted volumes, IN ADDITION
    # to the other exclusions.
    if not self.options.forensic_mode:
        exclude_list += [i for i in glob.glob(
            os.path.join(self.options.inputdir, 'Volumes/*'))]
        exclude_list = multiglob(self.options.inputdir, exclude_list)
    else:
        exclude_list = multiglob('/', exclude_list)

    self.log.debug("The following directories will be excluded from dirlist enumeration: {0}".format(exclude_list))

    filePool = ThreadPool(4)
    for i in root_list:
        for root, dirs, files in os.walk(i, topdown=True):

            # Prune excluded directories and files to prevent further
            # recursion into them.
            dirs[:] = [d for d in dirs if os.path.join(root, d) not in exclude_list]
            files[:] = [f for f in files if os.path.join(root, f) not in exclude_list]

            # Do not recurse into bundles that end with any of the file
            # extensions below UNLESS told to at amtc runtime.
            exc_bundles = ('.app', '.framework', '.lproj', '.plugin', '.kext',
                           '.osax', '.bundle', '.driver', '.wdgt')
            if (root.strip().endswith(exc_bundles) and
                    not os.path.basename(root).startswith('.') and
                    self.options.dir_recurse_bundles is False):
                dirs[:] = []
                files[:] = []

            if self.options.dir_no_multithreading:
                file_data = [self._handle_files(root, file_item) for file_item in files]
            else:
                file_data = self._filePooler(root, filePool, files)

            for record in file_data:
                wf = record['wherefrom_1']
                if wf != ['']:
                    try:
                        parsed_wf = read_stream_bplist(wf)
                        parsed_wf_utf8 = [str(a.encode('utf-8')) for a in parsed_wf if a != ""]
                    except Exception:
                        pathname = os.path.join(record['path'], record['name'])
                        parsed_wf_utf8 = ['ERROR']
                        self.log.debug("Could not parse embedded binary plist for kMDItemWhereFroms data from file {0}. {1}".format(pathname, [traceback.format_exc()]))

                    if len(parsed_wf_utf8) > 0:
                        record['wherefrom_1'] = parsed_wf_utf8[0]
                        if len(parsed_wf_utf8) > 1:
                            record['wherefrom_2'] = parsed_wf_utf8[1]
                    else:
                        record['wherefrom_1'] = ''
                else:
                    record['wherefrom_1'] = ''

                output.write_entry(record.values())

            # Bundles that will be code-signature checked.
            check_signatures_bundles = ('.app', '.kext', '.osax')
            for name in dirs:
                self._counter += 1
                if not self.options.quiet:
                    if self.options.debug:
                        sys.stdout.write('dirlist : INFO Wrote %d lines in %s | FileName: %s \033[K\r' % (self._counter, datetime.now(pytz.UTC) - self.options.start_time, name))
                    else:
                        sys.stdout.write('dirlist : INFO Wrote %d lines in %s \r' % (self._counter, datetime.now(pytz.UTC) - self.options.start_time))
                    sys.stdout.flush()

                # Get timestamps and metadata for each directory.
                record = OrderedDict((h, '') for h in self._headers)
                stat_data = stats2(os.path.join(root, name))
                record.update(stat_data)

                # If the directory is a bundle ending with one of the three
                # extensions above, check its code signatures.
                if (self.options.dir_no_code_signatures is False and
                        name.endswith(check_signatures_bundles) and
                        not name.startswith('.')):
                    # i.e. DO process code signatures
                    record['code_signatures'] = str(get_codesignatures(os.path.join(root, name)))

                output.write_entry(record.values())

    filePool.close()
    filePool.join()

    if not self.options.quiet:
        sys.stdout.write('\n')
        sys.stdout.flush()
def run(self):
    output = DataWriter(self.module_name(), self._headers, self.log,
                        self.run_id, self.options)

    # -------------BEGIN MODULE-SPECIFIC LOGIC------------- #
    globalpreferences = read_bplist(
        os.path.join(self.options.inputdir,
                     'Library/Preferences/.GlobalPreferences.plist'))
    preferences = plistlib.readPlist(
        os.path.join(self.options.inputdir,
                     'Library/Preferences/SystemConfiguration/preferences.plist'))
    systemversion = plistlib.readPlist(
        os.path.join(self.options.inputdir,
                     'System/Library/CoreServices/SystemVersion.plist'))

    # KEEP THE LINE BELOW TO GENERATE AN ORDEREDDICT BASED ON THE HEADERS
    record = OrderedDict((h, '') for h in self._headers)

    record['local_hostname'] = finditem(preferences, 'LocalHostName')
    record['ipaddress'] = self.options.full_prefix.split(',')[2]

    computer_name = finditem(preferences, 'ComputerName')
    if computer_name is not None:
        record['computer_name'] = computer_name.encode('utf-8')
    record['hostname'] = finditem(preferences, 'HostName')
    record['model'] = finditem(preferences, 'Model')
    record['product_version'] = self.options.os_version
    record['product_build_version'] = finditem(systemversion, 'ProductBuildVersion')

    g = glob.glob(os.path.join(
        self.options.inputdir,
        'private/var/folders/zz/zyxvpxvq6csfxvn_n00000sm00006d/C/*'))
    check_dbs = ['consolidated.db', 'cache_encryptedA.db', 'lockCache_encryptedA.db']
    serial_dbs = [loc for loc in g if any(loc.endswith(db) for db in check_dbs)]
    serial_query = 'SELECT SerialNumber FROM TableInfo;'

    for db in serial_dbs:
        try:
            cursor = sqlite3.connect(db).cursor()
            record['serial_no'] = cursor.execute(serial_query).fetchone()[0]
            break
        except Exception:
            record['serial_no'] = 'ERROR'

    record['volume_created'] = stats2(self.options.inputdir + "/", oMACB=True)['btime']
    record['amtc_runtime'] = str(self.options.start_time).replace(' ', 'T').replace('+00:00', 'Z')

    if 'Volumes' not in self.options.inputdir and self.options.forensic_mode is not True:
        tz, e = subprocess.Popen(["systemsetup", "-gettimezone"],
                                 stdout=subprocess.PIPE).communicate()
        record['system_tz'] = tz.rstrip().replace('Time Zone: ', '')

        _fdestatus, e = subprocess.Popen(["fdesetup", "status"],
                                         stdout=subprocess.PIPE).communicate()
        if 'On' in _fdestatus:
            record['fvde_status'] = "On"
        else:
            record['fvde_status'] = "Off"
    else:
        try:
            record['system_tz'] = globalpreferences[0][
                'com.apple.TimeZonePref.Last_Selected_City'][3]
        except Exception:
            self.log.error("Could not get system timezone: {0}".format(
                [traceback.format_exc()]))
            record['system_tz'] = "ERROR"
        record['fvde_status'] = "NA"

    # PROVIDE OUTPUT LINE, AND WRITE TO OUTFILE
    line = record.values()
    output.write_entry(line)
class ChromeModule(AutoMacTCModule):
    _mod_filename = __name__

    _profile_headers = [
        'user', 'profile', 'active_time', 'is_using_default_avatar',
        'is_omitted_from_profile_list', 'name', 'gaia_picture_file_name',
        'user_name', 'managed_user_id', 'gaia_name', 'avatar_icon', 'gaia_id',
        'local_auth_credentials', 'gaia_given_name', 'is_using_default_name',
        'background_apps', 'is_ephemeral'
    ]
    _downloads_headers = [
        'user', 'profile', 'download_path', 'current_path', 'download_started',
        'download_finished', 'danger_type', 'opened', 'last_modified',
        'referrer', 'tab_url', 'tab_referrer_url', 'download_url', 'url'
    ]
    _urls_headers = [
        'user', 'profile', 'visit_time', 'title', 'url', 'visit_count',
        'last_visit_time', 'typed_count', 'visit_duration', 'search_term'
    ]

    def __init__(self, *args, **kwargs):
        super(ChromeModule, self).__init__(*args, **kwargs)
        self.chrome_location = glob.glob(
            os.path.join(self.options.inputdir,
                         'Users/*/Library/Application Support/Google/Chrome/'))
        self._profiles_output = DataWriter('browser_chrome_profiles',
                                           self._profile_headers, self.log,
                                           self.run_id, self.options)
        self._downloads_output = DataWriter('browser_chrome_downloads',
                                            self._downloads_headers, self.log,
                                            self.run_id, self.options)
        self._urls_output = DataWriter('browser_chrome_history',
                                       self._urls_headers, self.log,
                                       self.run_id, self.options)

    def _parse_profiles(self, profile_data, user):
        self.log.debug("Success. Found metadata for {0} profiles.".format(
            len(profile_data.items())))
        for k, v in profile_data.items():
            record = OrderedDict((h, '') for h in self._profile_headers)
            record['user'] = user
            record['profile'] = k
            for key, val in v.items():
                if key in self._profile_headers:
                    record[key] = val
            if record['active_time'] != '':
                record['active_time'] = firefox_time(record['active_time'] * 1000000)
            self._profiles_output.write_entry(record.values())

    def _process_profiles(self):
        for c in self.chrome_location:
            userpath = c.split('/')
            userindex = userpath.index('Users') + 1
            user = userpath[userindex]

            self.log.debug(
                "Parsing Chrome Local State data under {0} user.".format(user))
            localstate_file = os.path.join(c, 'Local State')
            if os.path.exists(localstate_file):
                with open(localstate_file, 'r') as data:
                    jdata = json.loads(data.read())
                    chrome_ver = finditem(jdata, "stats_version")
                    self.log.debug(
                        "Chrome version {0} identified.".format(chrome_ver))

                    profile_data = finditem(jdata, "info_cache")
                    self._parse_profiles(profile_data, user)
            else:
                self.log.debug("File not found: {0}".format(localstate_file))

    def _pull_visit_history(self, conn, user, prof):
        self.log.debug("Executing sqlite query for visit history...")
        try:
            urls_data = conn.cursor().execute(
                'SELECT visit_time, urls.url, title, visit_duration, visit_count, \
                typed_count, urls.last_visit_time, term \
                from visits left join urls on visits.url = urls.id \
                left join keyword_search_terms on keyword_search_terms.url_id = urls.id'
            ).fetchall()
            self.log.debug("Success. Found {0} lines of data.".format(len(urls_data)))
        except Exception:
            self.log.debug('Failed to run query: {0}'.format(
                [traceback.format_exc()]))

            u_cnames = self._get_column_headers(conn, 'urls')
            self.log.debug('Columns available: {0}'.format(str(u_cnames)))

            v_cnames = self._get_column_headers(conn, 'visits')
            self.log.debug('Columns available: {0}'.format(str(v_cnames)))

            k_cnames = self._get_column_headers(conn, 'keyword_search_terms')
            self.log.debug('Columns available: {0}'.format(str(k_cnames)))
            return

        self.log.debug("Parsing and writing visits data...")
        for item in urls_data:
            record = OrderedDict((h, '') for h in self._urls_headers)
            item = list(item)

            record['user'] = user
            record['profile'] = prof
            record['visit_time'] = chrome_time(item[0])
            record['url'] = item[1]
            record['title'] = item[2].encode('utf-8')
            record['visit_duration'] = time.strftime(
                "%H:%M:%S", time.gmtime(item[3] / 1000000))
            record['visit_count'] = item[4]
            record['typed_count'] = item[5]
            record['last_visit_time'] = chrome_time(item[6])

            search_term = item[7]
            if search_term is not None:
                record['search_term'] = item[7].encode('utf-8')
            else:
                record['search_term'] = ''

            self._urls_output.write_entry(record.values())
        self.log.debug("Done.")

    def _pull_download_history(self, conn, user, prof):
        self.log.debug("Executing sqlite query for download history...")
        try:
            downloads_data = conn.cursor().execute(
                'SELECT current_path, target_path, start_time, end_time, danger_type, opened, \
                last_modified, referrer, tab_url, tab_referrer_url, site_url, url from downloads \
                left join downloads_url_chains on downloads_url_chains.id = downloads.id'
            ).fetchall()
            self.log.debug("Success. Found {0} lines of data.".format(len(downloads_data)))
        except Exception:
            self.log.debug('Failed to run query: {0}'.format(
                [traceback.format_exc()]))

            duc_cnames = self._get_column_headers(conn, 'downloads_url_chains')
            self.log.debug('Columns available: {0}'.format(str(duc_cnames)))

            d_cnames = self._get_column_headers(conn, 'downloads')
            self.log.debug('Columns available: {0}'.format(str(d_cnames)))
            return

        self.log.debug("Parsing and writing downloads data...")
        for item in downloads_data:
            record = OrderedDict((h, '') for h in self._downloads_headers)
            item = list(item)

            record['user'] = user
            record['profile'] = prof
            record['current_path'] = item[0].encode('utf-8')
            record['download_path'] = item[1].encode('utf-8')
            record['download_started'] = chrome_time(item[2])
            record['download_finished'] = chrome_time(item[3])
            record['danger_type'] = item[4]
            record['opened'] = item[5]

            if item[6] != '':
                last_modified = parser.parse(item[6]).replace(tzinfo=pytz.UTC)
                record['last_modified'] = last_modified.isoformat().replace('+00:00', 'Z')
            else:
                record['last_modified'] = ''

            record['referrer'] = item[7]
            record['tab_url'] = item[8]
            record['tab_referrer_url'] = item[9]
            record['download_url'] = item[10]
            record['url'] = item[11]

            self._downloads_output.write_entry(record.values())
        self.log.debug("Done.")

    def _get_column_headers(self, conn, column):
        col_headers = conn.cursor().execute('SELECT * from {0}'.format(column))
        names = list(map(lambda x: x[0], col_headers.description))
        return names

    def _get_chrome_version(self, history_db):
        version = sqlite3.connect(history_db).cursor().execute(
            'SELECT key, value FROM meta where key="version"').fetchall()
        ver = OrderedDict(version)['version']
        self.log.debug(
            "Chrome History database meta version {0} identified.".format(ver))
        return ver

    def _connect_to_db(self, chrome_file):
        try:
            self.log.debug("Trying to connect to {0} directly...".format(chrome_file))
            history_db = chrome_file
            ver = self._get_chrome_version(history_db)
            self.log.debug("Successfully connected.")
        except sqlite3.OperationalError:
            error = [x for x in traceback.format_exc().split('\n')
                     if x.startswith("OperationalError")]
            self.log.debug("Could not connect [{0}].".format(error[0]))

            if "database is locked" in error[0]:
                tmpdb = os.path.basename(chrome_file) + '-tmp'
                self.log.debug("Trying to connect to db copied to temp location...")

                shutil.copyfile(history_db, os.path.join(self.options.outputdir, tmpdb))
                history_db = os.path.join(self.options.outputdir, tmpdb)

                try:
                    ver = self._get_chrome_version(history_db)
                    self.log.debug("Successfully connected.")
                except Exception:
                    error = [x for x in traceback.format_exc().split('\n')
                             if x.startswith("OperationalError")]
                    self.log.debug("Could not connect [{0}].".format(error[0]))
                    self.log.error("Module fatal error: cannot parse database.")
                    history_db = None

        return history_db

    def run(self):
        self._process_profiles()

        # Make a full list of all Chrome profiles under all Chrome dirs.
        full_list_raw = [
            multiglob(c, ['Default', 'Profile *', 'Guest Profile'])
            for c in self.chrome_location
        ]
        full_list = list(itertools.chain.from_iterable(full_list_raw))

        for prof in full_list:
            userpath = prof.split('/')
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
            user = userpath[userindex]

            chromeindex = userpath.index('Chrome') + 1
            profile = userpath[chromeindex]

            self.log.debug(
                "Starting parsing for Chrome history under {0} user.".format(user))

            history_db = self._connect_to_db(os.path.join(prof, 'History'))
            if history_db:
                conn = sqlite3.connect(history_db)
                self._pull_visit_history(conn, user, profile)
                self._pull_download_history(conn, user, profile)

        try:
            os.remove(os.path.join(self.options.outputdir, 'History-tmp'))
        except OSError:
            pass
class AutoRunsModule(AutoMacTCModule):
    _mod_filename = __name__

    _headers = [
        'mtime', 'atime', 'ctime', 'btime', 'src_name', 'src_file',
        'prog_name', 'program', 'args', 'code_signatures', 'sha256', 'md5'
    ]

    def __init__(self, *args, **kwargs):
        super(AutoRunsModule, self).__init__(*args, **kwargs)
        self._output = DataWriter(self.module_name(), self._headers,
                                  self.log, self.run_id, self.options)

    def _shasum(self, filename, filesize, block_size=65536):
        if os.path.isfile(filename) is False:
            return 'ERROR-FILE-DNE'

        if filesize <= self.options.dir_hash_size_limit and filesize > 0:
            sha256 = hashlib.sha256()
            try:
                with open(filename, 'rb') as f:
                    for block in iter(lambda: f.read(block_size), b''):
                        sha256.update(block)
                sha256 = sha256.hexdigest()
            except IOError:
                sha256 = 'ERROR'
        else:
            sha256 = ''
        return sha256

    def _md5sum(self, filename, filesize, block_size=65536):
        if os.path.isfile(filename) is False:
            return 'ERROR-FILE-DNE'

        if filesize <= self.options.dir_hash_size_limit and filesize > 0:
            md5 = hashlib.md5()
            try:
                with open(filename, 'rb') as f:
                    for block in iter(lambda: f.read(block_size), b''):
                        md5.update(block)
                md5 = md5.hexdigest()
            except IOError:
                md5 = 'ERROR'
        else:
            md5 = ''
        return md5

    def _get_hashes(self, program):
        hashes = {'sha256': '', 'md5': ''}
        if "none" not in self.options.dir_hash_alg:
            size = stats2(program)['size']
            if 'sha256' in self.options.dir_hash_alg:
                try:
                    hashes['sha256'] = self._shasum(program, size)
                except Exception:
                    self.log.debug("Could not hash {0}: {1}".format(
                        program, [traceback.format_exc()]))
                    hashes['sha256'] = 'ERROR'
            if 'md5' in self.options.dir_hash_alg:
                try:
                    hashes['md5'] = self._md5sum(program, size)
                except Exception:
                    self.log.debug("Could not hash {0}: {1}".format(
                        program, [traceback.format_exc()]))
                    hashes['md5'] = 'ERROR'
        return hashes

    def _parse_sandboxed_loginitems(self):
        sandboxed_loginitems = multiglob(
            self.options.inputdir,
            ['var/db/com.apple.xpc.launchd/disabled.*.plist'])

        for i in sandboxed_loginitems:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "sandboxed_loginitems"

            try:
                p = plistlib.readPlist(i)
            except Exception:
                try:
                    p = read_bplist(i)
                except Exception:
                    self.log.debug('Could not read plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    p = 'ERROR'

            if p != 'ERROR':
                for k, v in p.items():
                    if v is False:
                        record['prog_name'] = k
                        self._output.write_entry(record.values())
            else:
                errors = {k: 'ERROR-CNR-PLIST' for k, v in record.items() if v == ''}
                record.update(errors)

    def _parse_cron(self):
        cron = multiglob(self.options.inputdir, ['private/var/at/tabs/*'])

        for i in cron:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "cron"

            with open(i, 'r') as crontab:
                jobs = [c.rstrip() for c in crontab.readlines()
                        if not c.startswith("# ")]
                for job in jobs:
                    record['program'] = job
                    self._output.write_entry(record.values())

    def _parse_LaunchAgentsDaemons(self):
        LaunchAgents = multiglob(self.options.inputdir, [
            'System/Library/LaunchAgents/*.plist',
            'Library/LaunchAgents/*.plist',
            'Users/*/Library/LaunchAgents/*.plist',
            'System/Library/LaunchAgents/.*.plist',
            'Library/LaunchAgents/.*.plist',
            'Users/*/Library/LaunchAgents/.*.plist'
        ])
        LaunchDaemons = multiglob(self.options.inputdir, [
            'System/Library/LaunchDaemons/*.plist',
            'Library/LaunchDaemons/*.plist',
            'System/Library/LaunchDaemons/.*.plist',
            'Library/LaunchDaemons/.*.plist'
        ])

        for i in LaunchDaemons + LaunchAgents:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "launch_items"

            try:
                p = plistlib.readPlist(i)
            except Exception:
                try:
                    p = read_bplist(i)
                except Exception:
                    self.log.debug('Could not read plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    p = 'ERROR'

            if p != 'ERROR':
                if type(p) is list and len(p) > 0:
                    p = p[0]

                # Try to get the Label from each plist.
                try:
                    record['prog_name'] = p['Label']
                except KeyError:
                    self.log.debug("Cannot extract 'Label' from plist: {0}".format(i))
                    record['prog_name'] = 'ERROR'

                # Try to get ProgramArguments if present, or Program, from each plist.
                try:
                    prog_args = p['ProgramArguments']
                    program = p['ProgramArguments'][0]
                    record['program'] = program
                    if len(prog_args) > 1:
                        record['args'] = ' '.join(p['ProgramArguments'][1:])
                except (KeyError, IndexError):
                    try:
                        program = p['Program']
                        record['program'] = program
                    except Exception:
                        self.log.debug("Cannot extract 'Program' or 'ProgramArguments' from plist: {0}".format(i))
                        program = None
                        record['program'] = 'ERROR'
                        record['args'] = 'ERROR'
                except Exception:
                    self.log.debug('Could not parse plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    program = None

                # If a program was identified, run additional checks on it.
                if program:
                    cs_check_path = os.path.join(self.options.inputdir,
                                                 program.lstrip('/'))
                    record['code_signatures'] = str(get_codesignatures(
                        cs_check_path, self.options.dir_no_code_signatures))

                    hashset = self._get_hashes(program)
                    record['sha256'] = hashset['sha256']
                    record['md5'] = hashset['md5']
            else:
                errors = {k: 'ERROR-CNR-PLIST' for k, v in record.items() if v == ''}
                record.update(errors)

            self._output.write_entry(record.values())

    def _parse_ScriptingAdditions(self):
        ScriptingAdditions = multiglob(self.options.inputdir, [
            'System/Library/ScriptingAdditions/*.osax',
            'Library/ScriptingAdditions/*.osax',
            'System/Library/ScriptingAdditions/.*.osax',
            'Library/ScriptingAdditions/.*.osax'
        ])

        for i in ScriptingAdditions:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "scripting_additions"
            record['code_signatures'] = str(
                get_codesignatures(i, self.options.dir_no_code_signatures))
            self._output.write_entry(record.values())

    def _parse_StartupItems(self):
        StartupItems = multiglob(self.options.inputdir, [
            'System/Library/StartupItems/*/*', 'Library/StartupItems/*/*'
        ])

        for i in StartupItems:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "startup_items"
            self._output.write_entry(record.values())

    def _parse_PeriodicItems_rcItems_emondItems(self):
        PeriodicItems = multiglob(self.options.inputdir, [
            'private/etc/periodic.conf', 'private/etc/periodic/*/*',
            'private/etc/*.local'
        ])
        rcItems = multiglob(self.options.inputdir, ['private/etc/rc.common'])
        emondItems = multiglob(self.options.inputdir, [
            'private/etc/emond.d/*', 'private/etc/emond.d/*/*'
        ])

        for i in PeriodicItems + rcItems + emondItems:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "periodic_rules_items"
            self._output.write_entry(record.values())

    def _parse_loginitems(self):
        user_loginitems_plist = multiglob(
            self.options.inputdir,
            ['Users/*/Library/Preferences/com.apple.loginitems.plist'])

        for i in user_loginitems_plist:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "login_items"

            try:
                p = plistlib.readPlist(i)
            except Exception:
                try:
                    p = read_bplist(i)
                except Exception:
                    self.log.debug('Could not read plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    p = 'ERROR'

            if p != 'ERROR':
                items = p[0]['SessionItems']['CustomListItems']
                for i in items:
                    record['prog_name'] = i['Name']
                    if 'Alias' in i:
                        try:
                            alias_bin = i['Alias']
                        except Exception:
                            alias_bin = 'ERROR'

                        if alias_bin != 'ERROR':
                            c = [i.encode('hex') for i in alias_bin]
                            for i in range(len(c)):
                                l = int(c[i], 16)
                                if l < len(c) and l > 2:
                                    test = os.path.join(
                                        self.options.inputdir,
                                        (''.join(c[i + 1:i + l + 1])).decode('hex'))
                                    try:
                                        if not os.path.exists(test):
                                            continue
                                        else:
                                            record['program'] = test
                                            cs_check_path = os.path.join(
                                                self.options.inputdir,
                                                test.lstrip('/'))
                                            record['code_signatures'] = str(
                                                get_codesignatures(
                                                    cs_check_path,
                                                    self.options.dir_no_code_signatures))
                                    except Exception:
                                        record['program'] = 'ERROR'
                                        record['code_signatures'] = 'ERROR'
                                        continue

                    elif 'Bookmark' in i:
                        try:
                            bookmark_bin = i['Bookmark']
                        except Exception:
                            bookmark_bin = 'ERROR'

                        if bookmark_bin != 'ERROR':
                            program = [i.encode('hex') for i in bookmark_bin]
                            data = Bookmark.from_bytes(''.join(program).decode('hex'))
                            d = data.get(0xf081, default=None)
                            d = ast.literal_eval(str(d).replace('Data', ''))
                            if d is not None:
                                prog = d.split(';')[-1].replace('\x00', '')
                                record['program'] = prog
                                cs_check_path = os.path.join(
                                    self.options.inputdir, prog.lstrip('/'))
                                record['code_signatures'] = str(
                                    get_codesignatures(
                                        cs_check_path,
                                        self.options.dir_no_code_signatures))

                    self._output.write_entry(record.values())
            else:
                errors = {k: 'ERROR-CNR-PLIST' for k, v in record.items() if v == ''}
                record.update(errors)

    def run(self):
        self._parse_sandboxed_loginitems()
        self._parse_loginitems()
        self._parse_cron()
        self._parse_LaunchAgentsDaemons()
        self._parse_StartupItems()
        self._parse_ScriptingAdditions()
        self._parse_PeriodicItems_rcItems_emondItems()
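# The hashing helpers above read files in fixed-size chunks so large binaries
# never load fully into memory; the two-argument iter() keeps calling f.read
# until it returns the empty-string sentinel. A standalone sketch of the same
# pattern:
import hashlib

def sha256_of(path, block_size=65536):
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            digest.update(block)
    return digest.hexdigest()

print(sha256_of('/bin/ls'))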