Example #1
0
def parse_sandboxed_loginitems(headers, output):
    """Parse sandboxed login items from launchd disabled.*.plist overrides.

    Reads each var/db/com.apple.xpc.launchd/disabled.*.plist under inputdir
    and writes one entry per program whose value is False (i.e. present in
    the override plist but not disabled — TODO confirm semantics against
    Apple's launchd override format).

    headers -- list of column names for the output record.
    output  -- data_writer-style object with a write_entry(values) method.
    """
    sandboxed_loginitems = multiglob(
        inputdir, ['var/db/com.apple.xpc.launchd/disabled.*.plist'])

    for i in sandboxed_loginitems:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "sandboxed_loginitems"

        # Try the XML plist reader first, then fall back to the binary
        # reader. Catch Exception (not bare except) so SystemExit and
        # KeyboardInterrupt still propagate.
        try:
            p = plistlib.readPlist(i)
        except Exception:
            try:
                p = read_bplist(i)
            except Exception:
                log.debug('Could not read plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                p = 'ERROR'

        if p != 'ERROR':
            for k, v in p.items():
                if v is False:
                    record['prog_name'] = k
                    output.write_entry(record.values())
        else:
            # Flag every still-empty field so the read failure is visible.
            errors = {
                k: 'ERROR-CNR-PLIST'
                for k, v in record.items() if v == ''
            }
            record.update(errors)
            # BUG FIX: the error record was previously built but never
            # written, so unreadable plists left no trace in the output.
            output.write_entry(record.values())
Example #2
0
def parse_sfl2s(headers, output):
    """Parse per-user SharedFileList v2 (*.sfl2) recent-items files.

    NOTE(review): in this view the function ends right after extracting
    `numberOfItems` and `items` — the code that consumes them is presumably
    below this chunk; confirm before modifying.

    headers -- list of column names for the output record.
    output  -- data_writer-style object with a write_entry(values) method.
    """
    sfl2_list = multiglob(inputdir, [
        'Users/*/Library/Application Support/com.apple.sharedfilelist/*.sfl2',
        'Users/*/Library/Application Support/com.apple.sharedfilelist/*/*.sfl2'
    ])

    for mru_file in sfl2_list:
        # Deserialize the whole NSKeyedArchiver-encoded binary plist.
        plist_objects = ccl_bplist.deserialise_NsKeyedArchiver(
            ccl_bplist.load(open(mru_file, "rb")), parse_whole_structure=True)

        # Pull the advertised maximum item count, if present; silently
        # skipped when the structure does not match.
        try:
            if plist_objects["root"]["NS.objects"][1]["NS.keys"][
                    0] == "com.apple.LSSharedFileList.MaxAmount":
                numberOfItems = plist_objects["root"]["NS.objects"][1][
                    "NS.objects"][0]
        except Exception, e:
            pass

        # Pull the recent-items list itself; log and fall back to None when
        # the expected keys are absent.
        try:
            if plist_objects["root"]["NS.keys"][0] == "items":
                items = plist_objects["root"]["NS.objects"][0]["NS.objects"]
        except Exception, e:
            log.debug('Could not parse SFL {0}: {1}'.format(
                mru_file, [traceback.format_exc()]))
            items = None
Example #3
0
def module():
    """Parse per-user Spotlight shortcut plists into the module output.

    For each com.apple.spotlight.Shortcuts plist found under Users/* or
    private/var/*, records the shortcut text, its display name, last-used
    timestamp, and target URL.
    """
    headers = ['user', 'shortcut', 'display_name', 'last_used', 'url']
    output = data_writer(_modName, headers)

    user_inputdir = multiglob(inputdir, [
        'Users/*/Library/Application Support/com.apple.spotlight.Shortcuts',
        'private/var/*/Library/Application Support/com.apple.spotlight.Shortcuts'
    ])

    # Renamed from 'file' to avoid shadowing the builtin.
    for shortcut_file in user_inputdir:
        # Username is the path component after the LAST 'Users' (or 'var')
        # element, so nested paths resolve to the right user.
        userpath = shortcut_file.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        log.debug(
            "Going to parse Spotlight shortcuts under {0} user.".format(user))
        try:
            spotlight_data = plistlib.readPlist(shortcut_file)
            for k in spotlight_data:
                shortcut = k
                display_name = spotlight_data[k]['DISPLAY_NAME']
                last_used = spotlight_data[k]['LAST_USED'].isoformat() + "Z"
                url = spotlight_data[k]['URL']

                line_raw = [user, shortcut, display_name, last_used, url]
                # Python 2 str.encode('utf-8') — byte output for the writer.
                line = [x.encode('utf-8') for x in line_raw]

                output.write_entry(line)

        # Fixed py3-incompatible 'except Exception, e' syntax; the bound
        # exception was unused anyway.
        except Exception:
            log.error("Could not parse: {0}".format(shortcut_file))
Example #4
0
def parse_securebookmarks(headers, output):
    """Parse per-container *.securebookmarks.plist files.

    Writes one record per bookmark key: the key itself as 'url' and its last
    path component as 'name'. Records are written even when a key cannot be
    parsed (the partial record is still emitted, matching prior behavior).

    headers -- list of column names for the output record.
    output  -- data_writer-style object with a write_entry(values) method.
    """
    secure_bookmarks = multiglob(inputdir, [
        'Users/*/Library/Containers/*/Data/Library/Preferences/*.securebookmarks.plist'
    ])

    for secure_bookmark_file in secure_bookmarks:
        try:
            data = plistlib.readPlist(secure_bookmark_file)
        # Fixed py3-incompatible 'except Exception, e' syntax.
        except Exception:
            log.debug('Could not parse securebookmark file {0}: {1}'.format(
                secure_bookmark_file, [traceback.format_exc()]))
            data = None

        if data:
            for k, v in data.items():
                record = OrderedDict((h, '') for h in headers)
                record['src_file'] = secure_bookmark_file
                record['src_name'] = "SecureBookmarks"
                try:
                    record['url'] = k
                    record['name'] = k.split('/')[-1].encode('utf-8')
                except Exception:
                    log.debug(
                        "Could not parse securebookmark item for key: {0}".
                        format(k))
                output.write_entry(record.values())
Example #5
0
def parse_sidebarplists(headers, output):
    """Parse per-user com.apple.sidebarlists.plist Finder sidebar volumes.

    Emits one record per entry in systemitems/VolumesList, extracting the
    display name and (when a Bookmark blob is present) a file:/// URL carved
    from the raw bookmark bytes.

    headers -- list of column names for the output record.
    output  -- data_writer-style object with a write_entry(values) method.
    """
    sidebar_plists = multiglob(
        inputdir, ['Users/*/Library/Preferences/com.apple.sidebarlists.plist'])

    for sblist in sidebar_plists:
        try:
            data = read_bplist(sblist)[0]
        # Fixed py3-incompatible 'except Exception, e' syntax.
        except Exception:
            log.debug('Could not parse sidebarplist {0}: {1}'.format(
                sblist, [traceback.format_exc()]))
            data = None

        if data:
            for i in data['systemitems']['VolumesList']:
                record = OrderedDict((h, '') for h in headers)
                record['src_file'] = sblist
                record['src_name'] = "SidebarPlist"
                try:
                    record['name'] = i['Name'].encode('utf-8')
                    if 'Bookmark' in i:
                        # Carve the path out of the raw bookmark bytes:
                        # everything after 'file:///' up to the first NUL.
                        record['url'] = 'file:///' + str(i['Bookmark']).split(
                            'file:///')[1].split('\x00')[0]
                    record['source_key'] = 'VolumesList'
                # Was a bare except; narrowed so SystemExit/KeyboardInterrupt
                # propagate.
                except Exception:
                    log.debug(
                        "Could not parse sidebarplist item: {0}".format(i))
                output.write_entry(record.values())
Example #6
0
def parse_PeriodicItems_rcItems_emondItems(headers, output):
    """Record file metadata for periodic, rc, and emond persistence items.

    Writes one entry (MACB timestamps + source path) per file found under
    the periodic/rc/emond configuration locations.
    """
    periodic_paths = multiglob(inputdir, [
        'private/etc/periodic.conf', 'private/etc/periodic/*/*',
        'private/etc/*.local'
    ])
    rc_paths = multiglob(inputdir, ['private/etc/rc.common'])
    emond_paths = multiglob(
        inputdir, ['private/etc/emond.d/*', 'private/etc/emond.d/*/*'])

    for item_path in periodic_paths + rc_paths + emond_paths:
        entry = OrderedDict((field, '') for field in headers)
        entry.update(stats2(item_path, oMACB=True))
        entry['src_file'] = item_path
        entry['src_name'] = "periodic_rules_items"
        output.write_entry(entry.values())
Example #7
0
def module():
    """Parse per-user shell history files (.*_history, .bash_sessions).

    Emits one record per history line, tagged with the owning user, the
    source file's stat metadata, and a 1-based line index.
    """
    _headers = [
        'mtime', 'atime', 'ctime', 'btime', 'src_file', 'user', 'item_index',
        'cmd'
    ]
    output = data_writer(_modName, _headers)

    user_inputdir = multiglob(inputdir, [
        'Users/*/.*_history',
        'Users/*/.bash_sessions/*',
        'private/var/*/.*_history',
        'private/var/*/.bash_sessions/*',
    ])

    def _user_from_path(path):
        # Username is the path component after the LAST 'Users' (or 'var')
        # element. Shared by the debug pass and the parse pass below
        # (previously duplicated inline).
        parts = path.split('/')
        if 'Users' in parts:
            idx = len(parts) - 1 - parts[::-1].index('Users') + 1
        else:
            idx = len(parts) - 1 - parts[::-1].index('var') + 1
        return parts[idx]

    # Generate debug messages indicating users with history files to be parsed.
    userlist = [_user_from_path(f) for f in user_inputdir]
    for u in list(set(userlist)):
        log.debug(
            "Going to parse bash and other history under {0} user.".format(u))

    # Parse history files found.
    for history_file in user_inputdir:
        user = _user_from_path(history_file)

        out = stats2(history_file)
        # Use a context manager: the original open(...).readlines() leaked
        # the file handle.
        with open(history_file, 'r') as fh:
            sess = fh.readlines()

        # enumerate(..., 1) replaces the manual pre-increment counter.
        for indexer, line in enumerate(sess, 1):
            record = OrderedDict((h, '') for h in _headers)

            # Copy any stat fields that match a header; src_file comes from
            # the stat record's 'name' field (preserved original behavior:
            # src_file is only set when at least one header key is present).
            for i in _headers:
                if i in out:
                    record[i] = out[i]
                    record['src_file'] = out['name']

            record['user'] = user
            record['cmd'] = line.rstrip()
            record['item_index'] = indexer
            output.write_entry(record.values())
Example #8
0
def parse_finderplists(headers, output):
    """Parse per-user com.apple.finder.plist recent-folder artifacts.

    Emits records for FXRecentFolders (name + carved file:/// URL from the
    bookmark blob) and RecentMoveAndCopyDestinations (path string).

    headers -- list of column names for the output record.
    output  -- data_writer-style object with a write_entry(values) method.
    """
    finder_plists = multiglob(
        inputdir, ['Users/*/Library/Preferences/com.apple.finder.plist'])

    for fplist in finder_plists:
        try:
            data = read_bplist(fplist)[0]
        # Fixed py3-incompatible 'except Exception, e' syntax.
        except Exception:
            log.debug('Could not parse finderplist {0}: {1}'.format(
                fplist, [traceback.format_exc()]))
            data = None

        if data:
            try:
                recentfolders = data['FXRecentFolders']
            except KeyError:
                log.debug("Could not find FXRecentFolders key in plist.")
                recentfolders = []

            try:
                moveandcopy = data['RecentMoveAndCopyDestinations']
            except KeyError:
                # BUG FIX: message previously said FXRecentFolders
                # (copy-paste error from the block above).
                log.debug(
                    "Could not find RecentMoveAndCopyDestinations key in plist.")
                moveandcopy = []

            for i in recentfolders:
                record = OrderedDict((h, '') for h in headers)
                record['src_file'] = fplist
                record['src_name'] = "FinderPlist"
                try:
                    record['source_key'] = 'FXRecentFolders'
                    record['name'] = i['name'].encode('utf-8')
                    bkmk = i['file-bookmark']
                    # Carve the path from the raw bookmark bytes: everything
                    # after the last ';' up to the first NUL.
                    record['url'] = 'file:///' + str(bkmk).split(
                        ';')[-1].split('\x00')[0]
                except Exception:
                    log.debug(
                        "Could not parse finderplist item: {0}".format(i))
                output.write_entry(record.values())

            for i in moveandcopy:
                record = OrderedDict((h, '') for h in headers)
                record['src_file'] = fplist
                # BUG FIX: src_name was set to the file path here, unlike the
                # "FinderPlist" label used for every other record.
                record['src_name'] = "FinderPlist"
                try:
                    record['url'] = i
                    record['name'] = i.split('/')[-2].encode('utf-8')
                    record['source_key'] = 'RecentMoveAndCopyDestinations'
                except Exception:
                    log.debug(
                        "Could not parse finderplist item: {0}: {1}".format(
                            i, [traceback.format_exc()]))
                output.write_entry(record.values())
Example #9
0
def parse_StartupItems(headers, output):
    """Record file metadata for legacy StartupItems persistence entries."""
    startup_paths = multiglob(
        inputdir,
        ['System/Library/StartupItems/*/*', 'Library/StartupItems/*/*'])

    for item_path in startup_paths:
        entry = OrderedDict((field, '') for field in headers)
        entry.update(stats2(item_path, oMACB=True))
        entry['src_file'] = item_path
        entry['src_name'] = "startup_items"
        output.write_entry(entry.values())
Example #10
0
def parse_ScriptingAdditions(headers, output):
    """Record metadata and code signatures for ScriptingAdditions (.osax).

    Covers both visible and dot-prefixed .osax bundles in the system and
    local Library locations.
    """
    osax_paths = multiglob(inputdir, [
        'System/Library/ScriptingAdditions/*.osax',
        'Library/ScriptingAdditions/*.osax',
        'System/Library/ScriptingAdditions/.*.osax',
        'Library/ScriptingAdditions/.*.osax'
    ])

    for osax in osax_paths:
        entry = OrderedDict((field, '') for field in headers)
        entry.update(stats2(osax, oMACB=True))
        entry['src_file'] = osax
        entry['src_name'] = "scripting_additions"
        entry['code_signatures'] = str(get_codesignatures(osax, ncs))
        output.write_entry(entry.values())
Example #11
0
def module():
    """Parse LaunchServices QuarantineEventsV2 databases per user.

    Queries each user's QuarantineEventsV2 SQLite database and writes one
    record per quarantine event with the Cocoa timestamp converted.
    """
    # BUG FIX: 'origin_title' appeared twice in this list, producing 12
    # header columns for 11 record values (OrderedDict de-duplicates keys).
    headers = [
        'user', 'timestamp', 'bundle_id', 'quarantine_agent', 'download_url',
        'sender_name', 'sender_address', 'typeno', 'origin_title',
        'origin_url', 'origin_alias'
    ]
    output = data_writer(_modName, headers)

    qevents_list = multiglob(inputdir, [
        'Users/*/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2',
        'private/var/*/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2'
    ])
    qry = 'SELECT * FROM LSQuarantineEvent'

    if len(qevents_list) == 0:
        # NOTE(review): qevents_loc is not defined in this function —
        # presumably a module-level name; verify it exists.
        log.debug("Files not found in: {0}".format(qevents_loc))

    for i in qevents_list:
        data = query_db(i, qry, outputdir)

        # Username is the path component after the LAST 'Users' (or 'var')
        # element.
        userpath = i.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in headers)
            record['user'] = user
            record['timestamp'] = cocoa_time(item[1])
            record['bundle_id'] = item[2]
            record['quarantine_agent'] = item[3]
            record['download_url'] = item[4]
            record['sender_name'] = item[5]
            record['sender_address'] = item[6]
            record['typeno'] = str(item[7])
            record['origin_title'] = item[8]
            record['origin_url'] = item[9]
            record['origin_alias'] = item[10]

            # Python 2: encode unicode values to UTF-8 bytes for the writer.
            line = [
                x.encode('utf-8') if isinstance(x, unicode) else x
                for x in record.values()
            ]
            output.write_entry(line)
Example #12
0
def parse_cron(headers, output):
    """Record every non-comment line of each user crontab as a job entry."""
    crontabs = multiglob(inputdir, ['private/var/at/tabs/*'])

    for tab in crontabs:
        entry = OrderedDict((field, '') for field in headers)
        entry.update(stats2(tab, oMACB=True))
        entry['src_file'] = tab
        entry['src_name'] = "cron"

        with open(tab, 'r') as fh:
            raw_lines = fh.readlines()
        # One output row per crontab line that is not a '# ' comment.
        for job in (ln.rstrip() for ln in raw_lines
                    if not ln.startswith("# ")):
            entry['program'] = job
            output.write_entry(entry.values())
Example #13
0
def parse_LaunchAgentsDaemons(headers, output):
    """Parse LaunchAgent and LaunchDaemon plists for persistence entries.

    Extracts Label, Program/ProgramArguments, the target binary's code
    signature, and its hashes.

    NOTE(review): no output.write_entry call appears in this view — the
    function is presumably truncated here; confirm before modifying.
    """
    LaunchAgents = multiglob(inputdir, [
        'System/Library/LaunchAgents/*.plist', 'Library/LaunchAgents/*.plist',
        'Users/*/Library/LaunchAgents/*.plist',
        'System/Library/LaunchAgents/.*.plist',
        'Library/LaunchAgents/.*.plist',
        'Users/*/Library/LaunchAgents/.*.plist'
    ])
    LaunchDaemons = multiglob(inputdir, [
        'System/Library/LaunchDaemons/*.plist',
        'Library/LaunchDaemons/*.plist',
        'System/Library/LaunchDaemons/.*.plist',
        'Library/LaunchDaemons/.*.plist'
    ])

    for i in LaunchDaemons + LaunchAgents:

        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "launch_items"

        # XML plist first, then binary plist fallback; 'ERROR' sentinel when
        # both readers fail.
        try:
            p = plistlib.readPlist(i)
        except:
            try:
                p = read_bplist(i)
            except:
                log.debug('Could not read plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                p = 'ERROR'

        if p != 'ERROR':
            # read_bplist returns a list; unwrap the first element.
            if type(p) is list and len(p) > 0:
                p = p[0]

            # Try to get Label from each plist.
            try:
                record['prog_name'] = p['Label']
            except KeyError:
                log.debug("Cannot extract 'Label' from plist: {0}".format(i))
                record['prog_name'] = 'ERROR'

            # Try to get ProgramArguments if present, or Program, from each plist.
            try:
                prog_args = p['ProgramArguments']
                program = p['ProgramArguments'][0]
                record['program'] = program

                # Remaining arguments (after argv[0]) are joined into 'args'.
                if len(prog_args) > 1:
                    record['args'] = ' '.join(p['ProgramArguments'][1:])
            except (KeyError, IndexError), e:
                try:
                    program = p['Program']
                    record['program'] = program
                except:
                    log.debug(
                        "Cannot extract 'Program' or 'ProgramArguments' from plist: {0}"
                        .format(i))
                    program = None
                    record['program'] = 'ERROR'
                    record['args'] = 'ERROR'
            except Exception, e:
                log.debug('Could not parse plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                program = None

            # If program is ID'd, run additional checks.
            if program:
                # Re-root the program path under inputdir before checking
                # its code signature.
                cs_check_path = os.path.join(inputdir, program.lstrip('/'))
                record['code_signatures'] = str(
                    get_codesignatures(cs_check_path, ncs))

                # NOTE(review): hashes use the raw 'program' path, not the
                # re-rooted cs_check_path — confirm that is intentional.
                hashset = get_hashes(program)
                record['sha256'] = hashset['sha256']
                record['md5'] = hashset['md5']
Example #14
0
def module(chrome_location):
    """Parse Chrome Local State profiles plus per-profile history databases.

    chrome_location -- list of Chrome data directories (one per user).
    """

    # For every Chrome dir on disk, parse its Local State file.
    profile_headers = [
        'user', 'profile', 'active_time', 'is_using_default_avatar',
        'is_omitted_from_profile_list', 'name', 'gaia_picture_file_name',
        'user_name', 'managed_user_id', 'gaia_name', 'avatar_icon', 'gaia_id',
        'local_auth_credentials', 'gaia_given_name', 'is_using_default_name',
        'background_apps', 'is_ephemeral'
    ]
    profile_output = data_writer('browser_chrome_profiles', profile_headers)

    for chrome_dir in chrome_location:

        # Username is the path component right after 'Users'.
        path_parts = chrome_dir.split('/')
        user = path_parts[path_parts.index('Users') + 1]

        log.debug(
            "Parsing Chrome Local State data under {0} user.".format(user))
        localstate_file = os.path.join(chrome_dir, 'Local State')
        if not os.path.exists(localstate_file):
            log.debug("File not found: {0}".format(localstate_file))
        else:
            with open(localstate_file, 'r') as fh:
                jdata = json.load(fh)
            chrome_ver = finditem(jdata, "stats_version")
            log.debug("Chrome version {0} identified.".format(chrome_ver))

            profile_data = finditem(jdata, "info_cache")
            parse_profiles(profile_data, user, profile_output,
                           profile_headers)

    # Build the full list of Chrome profiles across every Chrome dir.
    full_list = list(
        itertools.chain.from_iterable(
            multiglob(chrome_dir, ['Default', 'Profile *', 'Guest Profile'])
            for chrome_dir in chrome_location))

    urls_headers = [
        'user', 'profile', 'visit_time', 'title', 'url', 'visit_count',
        'last_visit_time', 'typed_count', 'visit_duration', 'search_term'
    ]
    urls_output = data_writer('browser_chrome_history', urls_headers)

    downloads_headers = [
        'user', 'profile', 'download_path', 'current_path', 'download_started',
        'download_finished', 'danger_type', 'opened', 'last_modified',
        'referrer', 'tab_url', 'tab_referrer_url', 'download_url', 'url'
    ]
    downloads_output = data_writer('browser_chrome_downloads',
                                   downloads_headers)

    for prof in full_list:

        path_parts = prof.split('/')
        user = path_parts[path_parts.index('Users') + 1]
        # Profile name is the component right after 'Chrome'.
        profile = path_parts[path_parts.index('Chrome') + 1]

        log.debug(
            "Starting parsing for Chrome history under {0} user.".format(user))

        history_db = connect_to_db(os.path.join(prof, 'History'))

        if history_db:
            pull_visit_history(history_db, user, profile, urls_output,
                               urls_headers)
            pull_download_history(history_db, user, profile, downloads_output,
                                  downloads_headers)

        # Clean up the temporary DB copy; absence is fine.
        try:
            os.remove(os.path.join(outputdir, 'History-tmp'))
        except OSError:
            pass
Example #15
0
def module():
    """Parse CoreAnalytics artifacts (.core_analytics reports + aggregates).

    Emits one record per analytics message, splitting appDescription into
    appName/appVersion and adding human-readable HH:MM:SS forms of the
    uptime/activeTime/powerTime counters.
    """

    # OSVersion like '10.13.6' -> 13.6 (minor.patch as a float).
    ver = float('.'.join(OSVersion.split('.')[1:]))

    if ver < 13:
        log.error("Artifacts are not present below OS version 10.13.")
        return

    headers = [
        'src_report', 'diag_start', 'diag_end', 'name', 'uuid', 'processName',
        'appDescription', 'appName', 'appVersion', 'foreground', 'uptime',
        'uptime_parsed', 'powerTime', 'powerTime_parsed', 'activeTime',
        'activeTime_parsed', 'activations', 'launches', 'activityPeriods',
        'idleTimeouts', 'Uptime', 'Count', 'version', 'identifier', 'overflow'
    ]

    output = data_writer(_modName, headers)

    analytics_location = multiglob(inputdir, [
        'Library/Logs/DiagnosticReports/Analytics*.core_analytics',
        'Library/Logs/DiagnosticReports/Retired/Analytics*.core_analytics'
    ])

    if len(analytics_location) < 1:
        log.debug("No .core_analytics files found.")
    else:
        log.debug("Found {0} .core_analytics files to parse.".format(
            len(analytics_location)))

    counter = 0
    for analytics_file in analytics_location:
        # Use a context manager: the original open(...).read() leaked the
        # file handle. Split once and reuse below (was split three times).
        with open(analytics_file, 'r') as fh:
            raw_lines = fh.read().split('\n')

        data_lines = [
            json.loads(i) for i in raw_lines
            if i.startswith("{\"message\":")
        ]

        # Diagnostic window start from the _marker line.
        # NOTE(review): an empty match list raises IndexError, which this
        # except does not catch — preserved from the original.
        try:
            diag_start = [
                json.loads(i) for i in raw_lines
                if i.startswith("{\"_marker\":") and "end-of-file" not in i
            ][0]['startTimestamp']
        except ValueError:
            diag_start = "ERROR"

        # Diagnostic window end, normalized to an ISO-8601 UTC 'Z' string.
        try:
            diag_end = [
                json.loads(i) for i in raw_lines
                if i.startswith("{\"timestamp\":")
            ][0]['timestamp']
            diag_end = str(parser.parse(diag_end).astimezone(pytz.utc))
            diag_end = diag_end.replace(' ', 'T').replace('+00:00', 'Z')
        except ValueError:
            diag_end = "ERROR"

        for i in data_lines:
            record = OrderedDict((h, '') for h in headers)
            record['src_report'] = analytics_file
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['name'] = i['name']
            record['uuid'] = i['uuid']

            # If any fields not currently recorded (based on the headers above) appear,
            # they will be added to overflow.
            record['overflow'] = {}

            for k, v in i['message'].items():
                if k in record.keys():
                    record[k] = i['message'][k]
                else:
                    record['overflow'].update({k: v})

            if len(record['overflow']) == 0:
                record['overflow'] = ''

            # Human-readable HH:MM:SS forms of the duration counters.
            if record['uptime'] != '':
                record['uptime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['uptime']))

            if record['activeTime'] != '':
                record['activeTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['activeTime']))

            if record['powerTime'] != '':
                record['powerTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['powerTime']))

            # appDescription is 'name ||| version'.
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(
                    ' ||| ')[1]

            line = record.values()
            output.write_entry(line)
            counter += 1

    # Parse aggregate files either from their directory on disk.
    agg_location = glob.glob(
        os.path.join(
            inputdir,
            'private/var/db/analyticsd/aggregates/4d7c9e4a-8c8c-4971-bce3-09d38d078849'
        ))

    if ver > 13.6:
        log.debug(
            "Cannot currently parse aggregate file above OS version 10.13.6.")
        return

    if len(agg_location) < 1:
        log.debug("No aggregate files found.")
    else:
        log.debug("Found {0} aggregate files to parse.".format(
            len(agg_location)))

    for aggregate in agg_location:
        with open(aggregate, 'r') as fh:
            data = fh.read()
        try:
            data_lines = json.loads(data)
        except ValueError:
            # Some aggregates are Python-literal lists rather than JSON;
            # round-trip through ast + json to normalize.
            data_lines = json.loads(json.dumps(list(ast.literal_eval(data))))

        # Single stat call (the original called stats2 twice per file).
        agg_stats = stats2(aggregate)
        diag_start = agg_stats['btime']
        diag_end = agg_stats['mtime']

        # Only entries shaped as ([proc, desc, fg], [7 counters]) are usable.
        raw = [
            i for i in data_lines
            if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)
        ]
        for i in raw:
            record = OrderedDict((h, '') for h in headers)

            record['src_report'] = aggregate
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['uuid'] = os.path.basename(aggregate)
            record['processName'] = i[0][0]

            record['appDescription'] = i[0][1]
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(
                    ' ||| ')[1]

            record['foreground'] = i[0][2]

            record['uptime'] = i[1][0]
            record['uptime_parsed'] = time.strftime("%H:%M:%S",
                                                    time.gmtime(i[1][0]))

            record['activeTime'] = i[1][1]
            record['activeTime_parsed'] = time.strftime(
                "%H:%M:%S", time.gmtime(i[1][1]))

            record['launches'] = i[1][2]
            record['idleTimeouts'] = i[1][3]
            record['activations'] = i[1][4]
            record['activityPeriods'] = i[1][5]

            record['powerTime'] = i[1][6]
            record['powerTime_parsed'] = time.strftime("%H:%M:%S",
                                                       time.gmtime(i[1][6]))

            line = record.values()
            output.write_entry(line)
            counter += 1

    if counter > 0:
        log.debug("Done. Wrote {0} lines.".format(counter))
Example #16
0
        user = userpath[userindex]

        log.debug("Starting parsing for Safari under {0} user.".format(user))

        if not os.path.exists(os.path.join(c, 'History.db')):
            log.debug("Did not find History.db under {0} user.".format(user))
            continue

        history_db = connect_to_db(os.path.join(c, 'History.db'),'history_visits')
        recently_closed_plist = os.path.join(c, 'RecentlyClosedTabs.plist')
        if history_db:
            pull_visit_history(recently_closed_plist, history_db, user, history_output, history_headers)

        downloads_plist = os.path.join(c, 'Downloads.plist')
        pull_download_history(downloads_plist, user, downloads_output, downloads_headers)
    
        try:
            os.remove(os.path.join(outputdir, 'History.db-tmp'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-shm'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-wal'))
        except OSError:
            pass


# Module entry: refuse stand-alone execution; otherwise locate Safari data
# dirs and run the parser.
if __name__ == "__main__":
    # Function-call form works identically under Python 2 for a single
    # argument and is required under Python 3 (the old statement form was a
    # py3 SyntaxError).
    print("This is an AutoMacTC module, and is not meant to be run stand-alone.")
    print("Exiting.")
    sys.exit(0)
else:
    safari_location = multiglob(inputdir, ['Users/*/Library/Safari/', 'private/var/*/Library/Safari'])
    module(safari_location)
Example #17
0
def parse_loginitems(headers, output):
    """Parse per-user com.apple.loginitems.plist login items.

    For each item, attempts to resolve the target program path from either a
    legacy 'Alias' blob or a newer 'Bookmark' blob, then records its code
    signature. Python 2 only (uses str.encode('hex') / .decode('hex')).

    headers -- list of column names for the output record.
    output  -- data_writer-style object with a write_entry(values) method.
    """
    user_loginitems_plist = multiglob(
        inputdir, ['Users/*/Library/Preferences/com.apple.loginitems.plist'])

    for i in user_loginitems_plist:
        record = OrderedDict((h, '') for h in headers)
        metadata = stats2(i, oMACB=True)
        record.update(metadata)
        record['src_file'] = i
        record['src_name'] = "login_items"

        # XML plist first, then binary plist fallback; 'ERROR' sentinel when
        # both readers fail.
        try:
            p = plistlib.readPlist(i)
        except:
            try:
                p = read_bplist(i)
            except:
                log.debug('Could not read plist {0}: {1}'.format(
                    i, [traceback.format_exc()]))
                p = 'ERROR'

        if p != 'ERROR':
            items = p[0]['SessionItems']['CustomListItems']
            # NOTE(review): this loop variable 'i' shadows the outer
            # file-path 'i' and is clobbered again by the comprehensions and
            # range loop below (Python 2 list comprehensions leak their
            # variable) — fragile but functional; confirm before refactoring.
            for i in items:
                record['prog_name'] = i['Name']
                if 'Alias' in i:
                    try:
                        alias_bin = i['Alias']
                    except:
                        alias_bin = 'ERROR'

                    if alias_bin != 'ERROR':
                        # Hex-encode the alias blob byte-by-byte, then scan
                        # for a length-prefixed run that decodes to a path
                        # existing under inputdir.
                        c = [i.encode('hex') for i in alias_bin]
                        for i in range(len(c)):
                            # Candidate length byte at position i.
                            l = int(c[i], 16)
                            if l < len(c) and l > 2:
                                test = os.path.join(inputdir, (''.join(
                                    c[i + 1:i + l + 1])).decode('hex'))
                                try:
                                    if not os.path.exists(test):
                                        continue
                                    else:
                                        record['program'] = test
                                        cs_check_path = os.path.join(
                                            inputdir, test.lstrip('/'))
                                        record['code_signatures'] = str(
                                            get_codesignatures(
                                                cs_check_path, ncs))

                                except:
                                    continue
                                    # NOTE(review): the two assignments below
                                    # are unreachable (dead code after the
                                    # continue above).
                                    record['program'] = 'ERROR'
                                    record['code_signatures'] = 'ERROR'

                elif 'Bookmark' in i:
                    try:
                        bookmark_bin = i['Bookmark']
                    except:
                        bookmark_bin = 'ERROR'

                    if bookmark_bin != 'ERROR':
                        # Rebuild the raw bookmark bytes and extract record
                        # 0xf081, which appears to carry the target path
                        # after the last ';' — TODO confirm against the
                        # Bookmark format documentation.
                        program = [i.encode('hex') for i in bookmark_bin]
                        data = Bookmark.from_bytes(
                            ''.join(program).decode('hex'))
                        d = data.get(0xf081, default=None)
                        d = ast.literal_eval(str(d).replace('Data', ''))
                        if d is not None:
                            prog = d.split(';')[-1].replace('\x00', '')
                            record['program'] = prog
                            cs_check_path = os.path.join(
                                inputdir, prog.lstrip('/'))
                            record['code_signatures'] = str(
                                get_codesignatures(cs_check_path, ncs))

                output.write_entry(record.values())
        else:
            # Could not read the plist at all: flag every still-empty field.
            # NOTE(review): this error record is never written to output —
            # confirm whether a write_entry call is missing here.
            errors = {
                k: 'ERROR-CNR-PLIST'
                for k, v in record.items() if v == ''
            }
            record.update(errors)
Example #18
0
# if there are specific directories to exclude, do not recurse them
if dirlist_exclude_dirs != ['']:
    exclude_list = [
        os.path.join(inputdir, i).strip("/")
        for i in default_exclude + dirlist_exclude_dirs
    ]
# if no specific directories are excluded, use default-list (created above)
else:
    exclude_list = [
        os.path.join(inputdir, i).strip("/") for i in default_exclude
    ]

# if NOT running with -f flag for forensic mode, exclude everything in /Volumes/* to prevent recursion of mounted volumes IN ADDITION to other exclusions.
if not forensic_mode:
    # glob.glob already returns a list — the previous identity comprehension
    # around it was redundant.
    exclude_list += glob.glob(os.path.join(inputdir, 'Volumes/*'))
    exclude_list = multiglob(inputdir, exclude_list)
else:
    exclude_list = multiglob('/', exclude_list)

log.debug(
    "The following directories will be excluded from dirlist enumeration: {0}".
    format(exclude_list))

# determine which hashing algorithms to run
# isinstance() is the idiomatic type check (was 'type(x) is ...').
if isinstance(hash_alg, list):
    # Lower-case each algorithm name character-by-character and rejoin
    # (equivalent to i.lower() for plain strings — preserved as-is).
    hash_alg = [''.join([x.lower() for x in i]) for i in hash_alg]
elif isinstance(hash_alg, str):
    hash_alg = [hash_alg]

counter = 0