def parse_sandboxed_loginitems(headers, output):
    sandboxed_loginitems = multiglob(
        inputdir, ['var/db/com.apple.xpc.launchd/disabled.*.plist'])

    for i in sandboxed_loginitems:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "sandboxed_loginitems"

        try:
            p = plistlib.readPlist(i)
        except:
            try:
                p = read_bplist(i)
            except:
                log.debug('Could not read plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                p = 'ERROR'

        if p != 'ERROR':
            for k, v in p.items():
                if v is False:
                    record['prog_name'] = k
                    output.write_entry(record.values())
        else:
            errors = {
                k: 'ERROR-CNR-PLIST'
                for k, v in record.items() if v == ''
            }
            record.update(errors)
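# A hedged sketch of the disabled.*.plist content the parser above expects
# (labels and values invented for illustration): launchd maps each service
# label to a "disabled" boolean, so entries whose value is False are still
# enabled and are the ones written to output.
#
#   {'com.example.agent': False,    # enabled  -> recorded as prog_name
#    'com.example.helper': True}    # disabled -> skipped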
def module():
    # KEEP THIS - ENABLES WRITING OUTPUT FILE.
    _output = data_writer(_modName, _headers)

    # -------------BEGIN MODULE-SPECIFIC LOGIC-------------

    preferences = plistlib.readPlist(
        os.path.join(
            inputdir,
            'Library/Preferences/SystemConfiguration/preferences.plist'))
    systemversion = plistlib.readPlist(
        os.path.join(inputdir,
                     'System/Library/CoreServices/SystemVersion.plist'))

    # KEEP THE LINE BELOW TO GENERATE AN ORDEREDDICT BASED ON THE HEADERS
    record = OrderedDict((h, '') for h in _headers)

    record['local_hostname'] = full_prefix.split(',')[1]
    record['ipaddress'] = full_prefix.split(',')[2]

    computer_name = finditem(preferences, 'ComputerName')
    if computer_name is not None:
        record['computer_name'] = computer_name.encode('utf-8')
    record['hostname'] = finditem(preferences, 'HostName')
    record['model'] = finditem(preferences, 'Model')
    record['product_version'] = OSVersion
    record['product_build_version'] = finditem(systemversion,
                                               'ProductBuildVersion')

    g = glob.glob(
        os.path.join(
            inputdir,
            'private/var/folders/zz/zyxvpxvq6csfxvn_n00000sm00006d/C/*'))
    check_dbs = [
        'consolidated.db', 'cache_encryptedA.db', 'lockCache_encryptedA.db'
    ]
    serial_dbs = [loc for loc in g if any(db in loc for db in check_dbs)]
    serial_query = 'SELECT SerialNumber FROM TableInfo;'

    for db in serial_dbs:
        try:
            cursor = sqlite3.connect(db).cursor()
            record['serial_no'] = cursor.execute(serial_query).fetchone()[0]
            break
        except sqlite3.OperationalError:
            record['serial_no'] = 'SERIALERROR0'
            log.error("Could not retrieve system serial number.")

    record['volume_created'] = stats2(inputdir + "/", oMACB=True)['btime']
    record['amtc_runtime'] = str(startTime).replace(' ', 'T').replace(
        '+00:00', 'Z')

    if 'Volumes' not in inputdir and forensic_mode is not True:
        tz, e = subprocess.Popen(["systemsetup", "-gettimezone"],
                                 stdout=subprocess.PIPE).communicate()
        record['system_tz'] = tz.rstrip().replace('Time Zone: ', '')

        _fdestatus, e = subprocess.Popen(["fdesetup", "status"],
                                         stdout=subprocess.PIPE).communicate()
        if 'On' in _fdestatus:
            record['fvde_status'] = "On"
        else:
            record['fvde_status'] = "Off"
    else:
        record['system_tz'] = "DEAD_DISK"
        record['fvde_status'] = "NA"

    # PROVIDE OUTPUT LINE, AND WRITE TO OUTFILE
    line = record.values()
    _output.write_entry(line)
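# The serial-number lookup above reads the locationd cache databases under the
# fixed 'zz/zyxvpxvq6csfxvn_n00000sm00006d' folder, whose TableInfo table
# carries a SerialNumber column. A minimal standalone sketch of the same query
# (the database path is hypothetical):
#
#   import sqlite3
#   conn = sqlite3.connect('/tmp/consolidated.db')
#   serial = conn.execute('SELECT SerialNumber FROM TableInfo;').fetchone()[0]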
def module():
    _headers = [
        'mtime', 'atime', 'ctime', 'btime', 'src_file', 'user', 'item_index',
        'cmd'
    ]
    output = data_writer(_modName, _headers)

    user_inputdir = multiglob(inputdir, [
        'Users/*/.*_history',
        'Users/*/.bash_sessions/*',
        'private/var/*/.*_history',
        'private/var/*/.bash_sessions/*',
    ])

    # Generate debug messages indicating users with history files to be parsed.
    userlist = []
    for file in user_inputdir:
        userpath = file.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]
        userlist.append(user)

    for u in list(set(userlist)):
        log.debug(
            "Going to parse bash and other history under {0} user.".format(u))

    # Parse history files found.
    for file in user_inputdir:
        # Get username from path.
        userpath = file.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        # Parse the files.
        out = stats2(file)
        sess = open(file, 'r').readlines()
        indexer = 0
        for line in sess:
            record = OrderedDict((h, '') for h in _headers)

            for i in _headers:
                if i in out:
                    record[i] = out[i]

            record['src_file'] = out['name']
            record['user'] = user
            record['cmd'] = line.rstrip()
            indexer += 1
            record['item_index'] = indexer
            output.write_entry(record.values())
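# Worked example of the reverse-index arithmetic above, on an invented path:
#   file     = 'root/Users/alice/.bash_history'
#   userpath = ['root', 'Users', 'alice', '.bash_history']
#   userpath[::-1].index('Users') == 2
#   userindex = 4 - 1 - 2 + 1 == 2, and userpath[2] == 'alice'
# i.e. the path element immediately after 'Users' (or 'var') is the username.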
def parse_StartupItems(headers, output):
    StartupItems = multiglob(
        inputdir,
        ['System/Library/StartupItems/*/*', 'Library/StartupItems/*/*'])

    for i in StartupItems:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "startup_items"
        output.write_entry(record.values())
def module():
    _headers = [
        'mtime', 'atime', 'ctime', 'btime', 'src_file', 'user', 'item_index',
        'cmd'
    ]
    output = data_writer(_modName, _headers)

    user_inputdir = glob.glob(os.path.join(inputdir, "Users/*"))
    user_inputdir += glob.glob(os.path.join(inputdir, "var/*"))

    for user_home in user_inputdir:
        # Get username from path.
        user = os.path.basename(user_home)

        # Get all _history files in the root of the user directory.
        bash_loc = os.path.join(user_home, '.*_history')
        u_bash = glob.glob(bash_loc)
        if len(u_bash) == 0:
            log.debug("Files not found in: {0}".format(bash_loc))

        # Get all files in the .bash_sessions directory.
        bash_sess_loc = os.path.join(user_home, '.bash_sessions/*')
        u_bash_sess = glob.glob(bash_sess_loc)
        if len(u_bash_sess) == 0:
            log.debug("Files not found in: {0}".format(bash_sess_loc))

        # Combine all files into a list and parse them iteratively.
        if len(u_bash) != 0 or len(u_bash_sess) != 0:
            log.debug(
                "Going to parse bash and other history under {0} user.".
                format(user))
            u_bash_all = u_bash + u_bash_sess

            for sess in u_bash_all:
                out = stats2(sess)
                sess = open(sess, 'r').readlines()
                indexer = 0
                for line in sess:
                    record = OrderedDict((h, '') for h in _headers)

                    for i in _headers:
                        if i in out:
                            record[i] = out[i]

                    record['src_file'] = out['name']
                    record['user'] = user
                    record['cmd'] = line.rstrip()
                    indexer += 1
                    record['item_index'] = indexer
                    output.write_entry(record.values())
def parse_ScriptingAdditions(headers, output):
    ScriptingAdditions = multiglob(inputdir, [
        'System/Library/ScriptingAdditions/*.osax',
        'Library/ScriptingAdditions/*.osax',
        'System/Library/ScriptingAdditions/.*.osax',
        'Library/ScriptingAdditions/.*.osax'
    ])

    for i in ScriptingAdditions:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "scripting_additions"
        record['code_signatures'] = str(get_codesignatures(i, ncs))
        output.write_entry(record.values())
def parse_PeriodicItems_rcItems_emondItems(headers, output):
    PeriodicItems = multiglob(inputdir, [
        'private/etc/periodic.conf', 'private/etc/periodic/*/*',
        'private/etc/*.local'
    ])
    rcItems = multiglob(inputdir, ['private/etc/rc.common'])
    emondItems = multiglob(
        inputdir, ['private/etc/emond.d/*', 'private/etc/emond.d/*/*'])

    for i in PeriodicItems + rcItems + emondItems:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "periodic_rules_items"
        output.write_entry(record.values())
def handle_files(name):
    global counter
    counter += 1

    if not quiet:
        if debug:
            sys.stdout.write(
                'dirlist : INFO Wrote %d lines in %s | FileName: %s \033[K\r'
                % (counter, datetime.now(pytz.UTC) - startTime, name))
        else:
            sys.stdout.write(
                'dirlist : INFO Wrote %d lines in %s \r' %
                (counter, datetime.now(pytz.UTC) - startTime))
        sys.stdout.flush()

    # Get timestamps and metadata for each file.
    record = OrderedDict((h, '') for h in headers)
    stat_data = stats2(os.path.join(root, name))
    record.update(stat_data)

    # Get quarantine extended attribute for each file, if available.
    if stat_data['mode'] != "Other":
        try:
            quarantine = xattr_get(os.path.join(root, name),
                                   "com.apple.quarantine").split(';')[2]
        except:
            quarantine = xattr_get(os.path.join(root, name),
                                   "com.apple.quarantine")
        record['quarantine'] = quarantine.replace('\\x20', ' ')

    # Get wherefrom extended attribute for each file, if available.
    wherefrom = xattr_get(os.path.join(root, name),
                          "com.apple.metadata:kMDItemWhereFroms")
    if wherefrom != "" and wherefrom.startswith("bplist"):
        record['wherefrom_1'] = wherefrom
    else:
        record['wherefrom_1'] = ['']

    # If hash alg is specified as 'none' at amtc runtime, do not hash files;
    # otherwise hash with sha256 and/or md5 as specified (sha256 is the
    # runtime default, md5 is user-specified).
    if "none" not in hash_alg and stat_data['mode'] == "Regular File":
        if 'sha256' in hash_alg:
            record['sha256'] = shasum(os.path.join(root, name),
                                      record['size'])
        if 'md5' in hash_alg:
            record['md5'] = md5sum(os.path.join(root, name), record['size'])

    # output.write_entry(record.values())
    return record
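# The com.apple.quarantine attribute parsed above is a semicolon-delimited
# string of the form flags;timestamp;agent;UUID, so split(';')[2] yields the
# quarantining agent. A representative (invented) value:
#   '0081;5d53a1b2;Safari;A1B2C3D4-...' -> split(';')[2] == 'Safari'
# The bare-except fallback covers values with fewer than three fields.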
def parse_cron(headers, output):
    cron = multiglob(inputdir, ['private/var/at/tabs/*'])

    for i in cron:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "cron"

        with open(i, 'r') as crontab:
            jobs = [
                c.rstrip() for c in crontab.readlines()
                if not c.startswith("# ")
            ]
            for job in jobs:
                record['program'] = job
                output.write_entry(record.values())
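# parse_cron treats lines beginning with "# " as comments and writes every
# remaining line, schedule included, as one record. An invented example:
#   '*/5 * * * * /usr/local/bin/beacon.sh' -> record['program']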
def get_hashes(program):
    hashes = {'sha256': '', 'md5': ''}
    if "none" not in hash_alg:
        size = stats2(program)['size']
        if 'sha256' in hash_alg:
            try:
                hashes['sha256'] = shasum(program, size)
            except:
                log.debug("Could not hash {0}: {1}".format(
                    program, [traceback.format_exc()]))
                hashes['sha256'] = 'ERROR'
        if 'md5' in hash_alg:
            try:
                hashes['md5'] = md5sum(program, size)
            except:
                log.debug("Could not hash {0}: {1}".format(
                    program, [traceback.format_exc()]))
                hashes['md5'] = 'ERROR'
    return hashes
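# A minimal usage sketch for get_hashes (the binary path is arbitrary; assumes
# the global hash_alg list and the shasum/md5sum helpers used above):
#   hash_alg = ['sha256', 'md5']
#   get_hashes('/bin/ls')
#   # -> {'sha256': '<hex digest>', 'md5': '<hex digest>'}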
def parse_LaunchAgentsDaemons(headers, output):
    LaunchAgents = multiglob(inputdir, [
        'System/Library/LaunchAgents/*.plist', 'Library/LaunchAgents/*.plist',
        'Users/*/Library/LaunchAgents/*.plist',
        'System/Library/LaunchAgents/.*.plist',
        'Library/LaunchAgents/.*.plist',
        'Users/*/Library/LaunchAgents/.*.plist'
    ])
    LaunchDaemons = multiglob(inputdir, [
        'System/Library/LaunchDaemons/*.plist',
        'Library/LaunchDaemons/*.plist',
        'System/Library/LaunchDaemons/.*.plist',
        'Library/LaunchDaemons/.*.plist'
    ])

    for i in LaunchDaemons + LaunchAgents:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "launch_items"

        try:
            p = plistlib.readPlist(i)
        except:
            try:
                p = read_bplist(i)
            except:
                log.debug('Could not read plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                p = 'ERROR'

        if p != 'ERROR':
            if type(p) is list and len(p) > 0:
                p = p[0]

            # Try to get Label from each plist.
            try:
                record['prog_name'] = p['Label']
            except KeyError:
                log.debug("Cannot extract 'Label' from plist: {0}".format(i))
                record['prog_name'] = 'ERROR'

            # Try to get ProgramArguments if present, or Program, from each plist.
            try:
                prog_args = p['ProgramArguments']
                program = p['ProgramArguments'][0]
                record['program'] = program

                if len(prog_args) > 1:
                    record['args'] = ' '.join(p['ProgramArguments'][1:])
            except (KeyError, IndexError) as e:
                try:
                    program = p['Program']
                    record['program'] = program
                except:
                    log.debug(
                        "Cannot extract 'Program' or 'ProgramArguments' from plist: {0}"
                        .format(i))
                    program = None
                    record['program'] = 'ERROR'
                    record['args'] = 'ERROR'
            except Exception as e:
                log.debug('Could not parse plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                program = None

            # If program is ID'd, run additional checks.
            if program:
                cs_check_path = os.path.join(inputdir, program.lstrip('/'))
                record['code_signatures'] = str(
                    get_codesignatures(cs_check_path, ncs))

                hashset = get_hashes(program)
                record['sha256'] = hashset['sha256']
                record['md5'] = hashset['md5']

        output.write_entry(record.values())
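# Sketch of the launchd plist keys this parser extracts (values invented):
#
#   <key>Label</key>
#   <string>com.example.daemon</string>
#   <key>ProgramArguments</key>
#   <array>
#       <string>/usr/local/bin/exampled</string>
#       <string>--flag</string>
#   </array>
#
# 'Label' feeds prog_name, ProgramArguments[0] feeds program, and any
# remaining arguments are joined into args; 'Program' is the fallback key.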
def module():
    headers = [
        'uid', 'path', 'name', 'last_hit_date', 'hit_count',
        'file_last_modified', 'generator', 'file_size'
    ]
    output = data_writer(_modName, headers)

    q_loc = os.path.join(
        inputdir,
        'private/var/folders/*/*/C/com.apple.QuickLook.thumbnailcache/index.sqlite'
    )
    qlist = glob.glob(q_loc)

    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver >= 14.0 and forensic_mode is not True:
            log.error(
                "Artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return
    else:
        if forensic_mode is not True:
            log.debug(
                "OSVersion not detected, but going to try to parse anyway.")
        else:
            log.error(
                "OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return

    if len(qlist) == 0:
        log.debug("Files not found in: {0}".format(q_loc))

    ql_sql = 'SELECT distinct k.folder, k.file_name, t.hit_count, t.last_hit_date, k.version \
              FROM (SELECT rowid AS f_rowid,folder,file_name,version FROM files) k \
              LEFT JOIN thumbnails t ON t.file_id = k.f_rowid ORDER BY t.hit_count DESC'

    for qfile in qlist:
        uid = stats2(qfile)['uid']
        data = query_db(qfile, ql_sql, outputdir)

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in headers)
            record['uid'] = uid
            record['path'] = item[0].encode('utf-8')
            record['name'] = item[1].encode('utf-8')

            if item[3]:
                record['last_hit_date'] = cocoa_time(item[3])
            else:
                record['last_hit_date'] = ''

            if item[2]:
                record['hit_count'] = item[2]
            else:
                record['hit_count'] = ''

            try:
                plist_array = read_stream_bplist(item[4])
                record['file_last_modified'] = cocoa_time(plist_array['date'])
                record['generator'] = plist_array['gen']
                try:
                    record['file_size'] = int(plist_array['size'])
                except KeyError:
                    record['file_size'] = 'Error'
            except Exception as e:
                log.error(
                    "Could not parse embedded binary plist for record {0}.".
                    format(record['name']))

            output.write_entry(record.values())
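# last_hit_date and the embedded 'date' value above are Cocoa timestamps:
# seconds since 2001-01-01 00:00:00 UTC. A hedged equivalent of cocoa_time()
# for a single value:
#   from datetime import datetime, timedelta
#   datetime(2001, 1, 1) + timedelta(seconds=553990853)  # -> a mid-2018 date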
    ]
    output = data_writer(_modName, headers)

    # If there are specific directories to recurse, recurse them.
    if dirlist_include_dirs != ['']:
        root_list = []
        for i in dirlist_include_dirs:
            root_list.append(os.path.join(inputdir, i))

        root_list = list(
            itertools.chain.from_iterable([glob.glob(i) for i in root_list]))
    # If there are no specific directories to recurse, recurse from the root
    # of the inputdir; also write the stats data for the root to the output.
    else:
        root_list = glob.glob(inputdir)

        record = OrderedDict((h, '') for h in headers)
        stat_data = stats2(inputdir)
        record.update(stat_data)
        output.write_entry(record.values())

    # By default (if 'no-defaults' is NOT in the exclusion flag), exclude the
    # following directories.
    if 'no-defaults' not in dirlist_exclude_dirs:
        if not forensic_mode:
            default_exclude = [
                '.fseventsd', '.DocumentRevisions-V100', '.Spotlight-V100',
                'Users/*/Pictures',
                'Users/*/Library/Application Support/AddressBook',
                'Users/*/Calendar', 'Users/*/Library/Calendars',
                'Users/*/Library/Preferences/com.apple.AddressBook.plist'
            ]
        else:
            default_exclude = [
    _loginwindow = os.path.join(
        inputdir, 'Library/Preferences/com.apple.loginwindow.plist')
    if not os.path.exists(_loginwindow):
        log.debug("File not found: {0}".format(_loginwindow))
    else:
        try:
            lastuser = read_bplist(_loginwindow)[0]['lastUserName']
        except Exception as e:
            lastuser = ""
            log.debug("Could not parse: {0}".format(_loginwindow))

    for user_path in _liveusers:
        user_home = os.path.basename(user_path)
        if user_home not in ['.localized', 'Shared']:
            record = OrderedDict((h, '') for h in headers)
            oMACB = stats2(user_path, oMACB=True)
            record.update(oMACB)

            if user_home in admins:
                record['admin'] = 'Yes'
            if user_home == lastuser:
                record['lastloggedin_user'] = '******'

            _liveplists = []
            record['user'] = user_home

            if len(_liveplists) > 0:
                i_plist = user_home + '.plist'
                if i_plist in _liveplists:
                    i_plist_array = read_bplist(
                        os.path.join(_liveusers_plists, i_plist))[0]
                    record['uniq_id'] = i_plist_array['uid'][0]
                    record['real_name'] = i_plist_array['realname'][0]
def module():
    ver = float('.'.join(OSVersion.split('.')[1:]))
    if ver < 13:
        log.error("Artifacts are not present below OS version 10.13.")
        return

    headers = [
        'src_report', 'diag_start', 'diag_end', 'name', 'uuid', 'processName',
        'appDescription', 'appName', 'appVersion', 'foreground', 'uptime',
        'uptime_parsed', 'powerTime', 'powerTime_parsed', 'activeTime',
        'activeTime_parsed', 'activations', 'launches', 'activityPeriods',
        'idleTimeouts', 'Uptime', 'Count', 'version', 'identifier', 'overflow'
    ]
    output = data_writer(_modName, headers)

    analytics_location = multiglob(inputdir, [
        'Library/Logs/DiagnosticReports/Analytics*.core_analytics',
        'Library/Logs/DiagnosticReports/Retired/Analytics*.core_analytics'
    ])

    if len(analytics_location) < 1:
        log.debug("No .core_analytics files found.")
    else:
        log.debug("Found {0} .core_analytics files to parse.".format(
            len(analytics_location)))

    counter = 0
    for file in analytics_location:
        data = open(file, 'r').read()
        data_lines = [
            json.loads(i) for i in data.split('\n')
            if i.startswith("{\"message\":")
        ]

        try:
            diag_start = [
                json.loads(i) for i in data.split('\n')
                if i.startswith("{\"_marker\":") and "end-of-file" not in i
            ][0]['startTimestamp']
        except ValueError:
            diag_start = "ERROR"

        try:
            diag_end = [
                json.loads(i) for i in data.split('\n')
                if i.startswith("{\"timestamp\":")
            ][0]['timestamp']
            diag_end = str(parser.parse(diag_end).astimezone(pytz.utc))
            diag_end = diag_end.replace(' ', 'T').replace('+00:00', 'Z')
        except ValueError:
            diag_end = "ERROR"

        for i in data_lines:
            record = OrderedDict((h, '') for h in headers)
            record['src_report'] = file
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['name'] = i['name']
            record['uuid'] = i['uuid']

            # If any fields not currently recorded (based on the headers
            # above) appear, they will be added to overflow.
            record['overflow'] = {}

            for k, v in i['message'].items():
                if k in record.keys():
                    record[k] = i['message'][k]
                else:
                    record['overflow'].update({k: v})

            if len(record['overflow']) == 0:
                record['overflow'] = ''

            if record['uptime'] != '':
                record['uptime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['uptime']))

            if record['activeTime'] != '':
                record['activeTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['activeTime']))

            if record['powerTime'] != '':
                record['powerTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['powerTime']))

            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(
                    ' ||| ')[1]

            line = record.values()
            output.write_entry(line)
            counter += 1
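    # A .core_analytics report is newline-delimited JSON; the loop above keys
    # off three line shapes (abridged, values invented):
    #   {"_marker": "...", "startTimestamp": "2019-01-01T00:00:00Z"}
    #   {"message": {"uptime": 3600, ...}, "name": "...", "uuid": "..."}
    #   {"timestamp": "2019-01-02 00:00:00.00 +0000"}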
    # Parse aggregate files from their directory on disk.
    agg_location = glob.glob(
        os.path.join(
            inputdir,
            'private/var/db/analyticsd/aggregates/4d7c9e4a-8c8c-4971-bce3-09d38d078849'
        ))

    if ver > 13.6:
        log.debug(
            "Cannot currently parse aggregate file above OS version 10.13.6.")
        return

    if len(agg_location) < 1:
        log.debug("No aggregate files found.")
    else:
        log.debug("Found {0} aggregate files to parse.".format(
            len(agg_location)))

    for aggregate in agg_location:
        data = open(aggregate, 'r').read()

        try:
            data_lines = json.loads(data)
        except ValueError:
            data_lines = json.loads(json.dumps(list(ast.literal_eval(data))))

        diag_start = stats2(aggregate)['btime']
        diag_end = stats2(aggregate)['mtime']

        raw = [
            i for i in data_lines
            if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)
        ]
        for i in raw:
            record = OrderedDict((h, '') for h in headers)

            record['src_report'] = aggregate
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['uuid'] = os.path.basename(aggregate)
            record['processName'] = i[0][0]

            record['appDescription'] = i[0][1]
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(
                    ' ||| ')[1]

            record['foreground'] = i[0][2]

            record['uptime'] = i[1][0]
            record['uptime_parsed'] = time.strftime("%H:%M:%S",
                                                    time.gmtime(i[1][0]))
            record['activeTime'] = i[1][1]
            record['activeTime_parsed'] = time.strftime(
                "%H:%M:%S", time.gmtime(i[1][1]))
            record['launches'] = i[1][2]
            record['idleTimeouts'] = i[1][3]
            record['activations'] = i[1][4]
            record['activityPeriods'] = i[1][5]
            record['powerTime'] = i[1][6]
            record['powerTime_parsed'] = time.strftime("%H:%M:%S",
                                                       time.gmtime(i[1][6]))

            line = record.values()
            output.write_entry(line)
            counter += 1

    if counter > 0:
        log.debug("Done. Wrote {0} lines.".format(counter))
def parse_loginitems(headers, output):
    user_loginitems_plist = multiglob(
        inputdir, ['Users/*/Library/Preferences/com.apple.loginitems.plist'])

    for i in user_loginitems_plist:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "login_items"

        try:
            p = plistlib.readPlist(i)
        except:
            try:
                p = read_bplist(i)
            except:
                log.debug('Could not read plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                p = 'ERROR'

        if p != 'ERROR':
            items = p[0]['SessionItems']['CustomListItems']
            for i in items:
                record['prog_name'] = i['Name']

                if 'Alias' in i:
                    try:
                        alias_bin = i['Alias']
                    except:
                        alias_bin = 'ERROR'

                    if alias_bin != 'ERROR':
                        c = [i.encode('hex') for i in alias_bin]
                        for i in range(len(c)):
                            l = int(c[i], 16)
                            if l < len(c) and l > 2:
                                test = os.path.join(inputdir, (''.join(
                                    c[i + 1:i + l + 1])).decode('hex'))
                                try:
                                    if not os.path.exists(test):
                                        continue
                                    else:
                                        record['program'] = test
                                        cs_check_path = os.path.join(
                                            inputdir, test.lstrip('/'))
                                        record['code_signatures'] = str(
                                            get_codesignatures(
                                                cs_check_path, ncs))
                                except:
                                    record['program'] = 'ERROR'
                                    record['code_signatures'] = 'ERROR'
                                    continue

                elif 'Bookmark' in i:
                    try:
                        bookmark_bin = i['Bookmark']
                    except:
                        bookmark_bin = 'ERROR'

                    if bookmark_bin != 'ERROR':
                        program = [i.encode('hex') for i in bookmark_bin]
                        data = Bookmark.from_bytes(
                            ''.join(program).decode('hex'))
                        d = data.get(0xf081, default=None)
                        d = ast.literal_eval(str(d).replace('Data', ''))
                        if d is not None:
                            prog = d.split(';')[-1].replace('\x00', '')
                            record['program'] = prog
                            cs_check_path = os.path.join(
                                inputdir, prog.lstrip('/'))
                            record['code_signatures'] = str(
                                get_codesignatures(cs_check_path, ncs))

                output.write_entry(record.values())
        else:
            errors = {
                k: 'ERROR-CNR-PLIST'
                for k, v in record.items() if v == ''
            }
            record.update(errors)
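# The alias walk above is a heuristic length-prefix scan over the raw alias
# bytes: each byte is tried as a Pascal-style string length l, and the next l
# bytes are decoded and tested as a path. Invented example:
#   ... 0x09 '/bin/bash' ... -> l == 9, c[i+1:i+10] decodes to '/bin/bash',
# and if that path exists under inputdir it is recorded as the program.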
        loc for loc in g if any(loc.endswith(db) for db in check_dbs)
    ]
    serial_query = 'SELECT SerialNumber FROM TableInfo;'

    for db in serial_dbs:
        try:
            cursor = sqlite3.connect(db).cursor()
            record['serial_no'] = cursor.execute(serial_query).fetchone()[0]
            break
        except Exception as e:
            log.debug("Could not get serial number from {0}: {1}".format(
                db, [traceback.format_exc()]))
            record['serial_no'] = 'ERROR'

    record['volume_created'] = stats2(inputdir + "/", oMACB=True)['btime']
    record['amtc_runtime'] = str(startTime).replace(' ', 'T').replace(
        '+00:00', 'Z')

    if 'Volumes' not in inputdir and forensic_mode is not True:
        tz, e = subprocess.Popen(["systemsetup", "-gettimezone"],
                                 stdout=subprocess.PIPE).communicate()
        record['system_tz'] = tz.rstrip().replace('Time Zone: ', '')

        _fdestatus, e = subprocess.Popen(["fdesetup", "status"],
                                         stdout=subprocess.PIPE).communicate()
        if 'On' in _fdestatus:
            record['fvde_status'] = "On"
        else:
            record['fvde_status'] = "Off"
            except Exception as e:
                data_lines = []
                log.debug("Could not parse aggregate file: {0}.".format(
                    [traceback.format_exc()]))
        except Exception as e:
            data_lines = []
            log.debug("Could not parse aggregate file: {0}.".format(
                [traceback.format_exc()]))
        else:
            data_lines = []
            log.debug(
                "Could not parse aggregate file. File had unusual number of objects to parse: {0}. | {1}"
                .format(str(len(obj_list)), [traceback.format_exc()]))

        diag_start = stats2(aggregate)['btime']
        diag_end = stats2(aggregate)['mtime']

        raw = [
            i for i in data_lines
            if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)
        ]
        for i in raw:
            record = OrderedDict((h, '') for h in headers)

            record['src_report'] = aggregate
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['uuid'] = os.path.basename(aggregate)
            record['processName'] = i[0][0]
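            # Shape of each matching aggregate entry (values invented): a pair
            # of a 3-element process tuple and a 7-element counter tuple, e.g.
            #   [["com.apple.Safari", "Safari ||| 12.0", 1],
            #    [3600, 1200, 4, 0, 6, 9, 300]]
            # mapping to processName/appDescription/foreground and uptime/
            # activeTime/launches/idleTimeouts/activations/activityPeriods/
            # powerTime.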