Example #1
    def run(self):

        self._process_profiles()

        # make a full list of all chrome profiles under all chrome dirs
        full_list_raw = [
            multiglob(c, ['Default', 'Profile *', 'Guest Profile'])
            for c in self.chrome_location
        ]
        full_list = list(itertools.chain.from_iterable(full_list_raw))

        for prof in full_list:

            userpath = prof.split('/')
            # the username is the path component immediately after the last 'Users' directory
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
            user = userpath[userindex]

            # the profile name is the path component immediately after 'Chrome'
            chromeindex = userpath.index('Chrome') + 1
            profile = userpath[chromeindex]

            self.log.debug(
                "Starting Chrome history parsing for user {0}.".format(user))

            history_db = self._connect_to_db(os.path.join(prof, 'History'))

            if history_db:
                conn = sqlite3.connect(history_db)
                self._pull_visit_history(conn, user, profile)
                self._pull_download_history(conn, user, profile)

            try:
                os.remove(os.path.join(self.options.outputdir, 'History-tmp'))
            except OSError:
                pass
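
These modules rely on a multiglob helper that is not shown here. A minimal sketch of the assumed behavior (expand several glob patterns under one root directory and return the combined matches) could look like the following; the function name and signature are taken from the calls above, everything else is an assumption.

    import glob
    import itertools
    import os


    def multiglob(inputdir, patterns):
        # Assumed helper: expand each glob pattern relative to inputdir and
        # return one flattened list of matching paths.
        matches = [glob.glob(os.path.join(inputdir, p)) for p in patterns]
        return list(itertools.chain.from_iterable(matches))
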
Example #2
    def _parse_sidebarplists(self):
        sidebar_plists = multiglob(
            self.options.inputdir,
            ['Users/*/Library/Preferences/com.apple.sidebarlists.plist'])

        for sblist in sidebar_plists:
            try:
                data = read_bplist(sblist)[0]
            except Exception:
                self.log.debug('Could not parse sidebarplist {0}: {1}'.format(
                    sblist, [traceback.format_exc()]))
                data = None

            if data:
                for i in data['systemitems']['VolumesList']:
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = sblist
                    record['src_name'] = "SidebarPlist"
                    try:
                        record['name'] = i['Name'].encode('utf-8')
                        if 'Bookmark' in i:
                            record['url'] = 'file:///' + str(
                                i['Bookmark']).split('file:///')[1].split(
                                    '\x00')[0]
                        record['source_key'] = 'VolumesList'
                    except Exception:
                        self.log.debug(
                            "Could not parse sidebarplist item: {0}".format(i))
                    self._output.write_entry(record.values())
Example #3
    def _parse_sandboxed_loginitems(self):
        sandboxed_loginitems = multiglob(
            self.options.inputdir,
            ['var/db/com.apple.xpc.launchd/disabled.*.plist'])

        for i in sandboxed_loginitems:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "sandboxed_loginitems"

            try:
                p = plistlib.readPlist(i)
            except Exception:
                try:
                    p = read_bplist(i)
                except Exception:
                    self.log.debug('Could not read plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    p = 'ERROR'

            if p != 'ERROR':
                for k, v in p.items():
                    if v is False:
                        record['prog_name'] = k
                        self._output.write_entry(record.values())
            else:
                errors = {
                    k: 'ERROR-CNR-PLIST'
                    for k, v in record.items() if v == ''
                }
                record.update(errors)
                self._output.write_entry(record.values())
Example #4
    def _parse_securebookmarks(self):
        secure_bookmarks = multiglob(self.options.inputdir, [
            'Users/*/Library/Containers/*/Data/Library/Preferences/*.securebookmarks.plist'
        ])

        for secure_bookmark_file in secure_bookmarks:
            try:
                data = plistlib.readPlist(secure_bookmark_file)
            except Exception:
                self.log.debug(
                    'Could not parse securebookmark file {0}: {1}'.format(
                        secure_bookmark_file, [traceback.format_exc()]))
                data = None

            if data:
                for k, v in data.items():
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = secure_bookmark_file
                    record['src_name'] = "SecureBookmarks"
                    try:
                        record['url'] = k
                        record['name'] = k.split('/')[-1].encode('utf-8')
                    except Exception:
                        self.log.debug(
                            "Could not parse securebookmark item for key: {0}".
                            format(k))
                    self._output.write_entry(record.values())
Example #5
    def _parse_PeriodicItems_rcItems_emondItems(self):
        PeriodicItems = multiglob(self.options.inputdir, [
            'private/etc/periodic.conf', 'private/etc/periodic/*/*',
            'private/etc/*.local'
        ])
        rcItems = multiglob(self.options.inputdir, ['private/etc/rc.common'])
        emondItems = multiglob(
            self.options.inputdir,
            ['private/etc/emond.d/*', 'private/etc/emond.d/*/*'])

        for i in PeriodicItems + rcItems + emondItems:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "periodic_rules_items"

            self._output.write_entry(record.values())
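
The stats2 helper is likewise assumed rather than defined in these examples. Judging by how its return value is merged into each record (and by the 'btime'/'mtime' lookups in the core_analytics example further down), it presumably returns a dict of file metadata, restricted to the MACB timestamps when called with oMACB=True. A rough sketch under that assumption:

    import os
    from datetime import datetime


    def stats2(path, oMACB=False):
        # Assumed helper: return file metadata as a dict; with oMACB=True,
        # return only the modified/accessed/changed/birth timestamps so they
        # can be merged straight into an output record.
        def iso(t):
            return datetime.utcfromtimestamp(t).isoformat() + 'Z'

        st = os.lstat(path)
        macb = {
            'mtime': iso(st.st_mtime),
            'atime': iso(st.st_atime),
            'ctime': iso(st.st_ctime),
            'btime': iso(getattr(st, 'st_birthtime', st.st_ctime)),
        }
        if oMACB:
            return macb
        macb.update({'size': st.st_size, 'mode': oct(st.st_mode),
                     'uid': st.st_uid, 'gid': st.st_gid})
        return macb
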
Example #6
    def _parse_finderplists(self):
        finder_plists = multiglob(
            self.options.inputdir,
            ['Users/*/Library/Preferences/com.apple.finder.plist'])

        for fplist in finder_plists:
            try:
                data = read_bplist(fplist)[0]
            except Exception:
                self.log.debug('Could not parse finderplist {0}: {1}'.format(
                    fplist, [traceback.format_exc()]))
                data = None

            if data:
                try:
                    recentfolders = data['FXRecentFolders']
                except KeyError:
                    self.log.debug(
                        "Could not find FXRecentFolders key in plist.")
                    recentfolders = []

                try:
                    moveandcopy = data['RecentMoveAndCopyDestinations']
                except KeyError:
                    self.log.debug(
                        "Could not find RecentMoveAndCopyDestinations key in plist.")
                    moveandcopy = []

                for i in recentfolders:
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = fplist
                    record['src_name'] = "FinderPlist"
                    try:
                        record['source_key'] = 'FXRecentFolders'
                        record['name'] = i['name'].encode('utf-8')
                        bkmk = i['file-bookmark']
                        record['url'] = 'file:///' + str(bkmk).split(
                            ';')[-1].split('\x00')[0]
                    except Exception:
                        self.log.debug(
                            "Could not parse finderplist item: {0}".format(i))
                    self._output.write_entry(record.values())

                for i in moveandcopy:
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = fplist
                    record['src_name'] = "FinderPlist"
                    try:
                        record['url'] = i
                        record['name'] = i.split('/')[-2].encode('utf-8')
                        record['source_key'] = 'RecentMoveAndCopyDestinations'
                    except Exception:
                        self.log.debug(
                            "Could not parse finderplist item: {0}: {1}".
                            format(i, [traceback.format_exc()]))
                    self._output.write_entry(record.values())
Example #7
    def _parse_StartupItems(self):
        StartupItems = multiglob(
            self.options.inputdir,
            ['System/Library/StartupItems/*/*', 'Library/StartupItems/*/*'])

        for i in StartupItems:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "startup_items"
            self._output.write_entry(record.values())
Example #8
    def _parse_sfls(self):
        sfl_list = multiglob(self.options.inputdir, [
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*.sfl',
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*/*.sfl'
        ])

        for mru_file in sfl_list:
            plist_objects = ccl_bplist.deserialise_NsKeyedArchiver(
                ccl_bplist.load(open(mru_file, "rb")),
                parse_whole_structure=True)
            try:
                if plist_objects["root"]["NS.objects"][1]["NS.keys"][
                        0] == "com.apple.LSSharedFileList.MaxAmount":
                    numberOfItems = plist_objects["root"]["NS.objects"][1][
                        "NS.objects"][0]
            except Exception:
                pass

            try:
                if plist_objects["root"]["NS.keys"][2] == "items":
                    items = plist_objects["root"]["NS.objects"][2][
                        "NS.objects"]
            except Exception:
                self.log.debug('Could not parse SFL {0}: {1}'.format(
                    mru_file, [traceback.format_exc()]))
                items = None

            if items:
                for n, item in enumerate(items):
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = mru_file
                    record['src_name'] = "SharedFileList"
                    try:
                        try:
                            name = item["name"].encode('utf-8')
                        except Exception:
                            name = ''
                        record['name'] = name
                        record['item_index'] = str(n)
                        record['order'] = item['order']
                        record['url'] = item['URL']['NS.relative']

                    except Exception:
                        self.log.debug(
                            "Could not parse SFL item: {0}".format(item))

                    self._output.write_entry(record.values())
Example #9
    def _parse_ScriptingAdditions(self):
        ScriptingAdditions = multiglob(self.options.inputdir, [
            'System/Library/ScriptingAdditions/*.osax',
            'Library/ScriptingAdditions/*.osax',
            'System/Library/ScriptingAdditions/.*.osax',
            'Library/ScriptingAdditions/.*.osax'
        ])

        for i in ScriptingAdditions:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "scripting_additions"
            record['code_signatures'] = str(
                get_codesignatures(i, self.options.dir_no_code_signatures))
            self._output.write_entry(record.values())
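
get_codesignatures is another helper assumed by these modules; the dir_no_code_signatures option passed as its second argument suggests it can be told to skip the check entirely. A hedged sketch, assuming it shells out to the macOS codesign utility and returns a list of signing authorities (the real helper's output format may differ):

    import subprocess


    def get_codesignatures(path, nocheck=False):
        # Assumed helper: return the signing authorities reported by
        # codesign(1) for a bundle or binary, or a status marker on failure.
        if nocheck:
            return ['']
        try:
            out = subprocess.check_output(
                ['codesign', '-dv', '--verbose=2', path],
                stderr=subprocess.STDOUT)
            return [line.split('=', 1)[1]
                    for line in out.decode('utf-8', 'replace').splitlines()
                    if line.startswith('Authority=')]
        except subprocess.CalledProcessError:
            return ['Unsigned']
        except OSError:
            return ['ERROR-could-not-run-codesign']
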
Example #10
    def _parse_cron(self):
        cron = multiglob(self.options.inputdir, ['private/var/at/tabs/*'])

        for i in cron:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "cron"

            with open(i, 'r') as crontab:
                jobs = [
                    c.rstrip() for c in crontab.readlines()
                    if not c.startswith("# ")
                ]
                for job in jobs:
                    record['program'] = job
                    self._output.write_entry(record.values())
Example #11
    def run(self):
        output = DataWriter(self.module_name(), self._headers, self.log, self.run_id, self.options)

        # if there are specific directories to recurse, recurse them.
        if self.options.dir_include_dirs != ['']:
            root_list = []
            for i in self.options.dir_include_dirs:
                root_list.append(os.path.join(self.options.inputdir, i))

            root_list = list(itertools.chain.from_iterable([glob.glob(i) for i in root_list]))
        # if there are no specific directories to recurse, recurse from the root of the inputdir and also write the stats data for the root itself.
        else:
            root_list = glob.glob(self.options.inputdir)
            record = OrderedDict((h, '') for h in self._headers)
            stat_data = stats2(self.options.inputdir)
            record.update(stat_data)
            output.write_entry(record.values())

        # by default (if no-defaults is NOT in exclusion flag) exclude the following directories
        if 'no-defaults' not in self.options.dir_exclude_dirs:
            if not self.options.forensic_mode:
                default_exclude = [
                    '.fseventsd', '.DocumentRevisions-V100', '.Spotlight-V100',
                    'Users/*/Pictures', 'Users/*/Library/Application Support/AddressBook',
                    'Users/*/Calendar', 'Users/*/Library/Calendars',
                    'Users/*/Library/Preferences/com.apple.AddressBook.plist'
                ]
            else:
                default_exclude = ['.fseventsd', '.DocumentRevisions-V100', '.Spotlight-V100']

        # if no-defaults is in the exclusion flag, remove no-defaults and use the user-provided exclusion list
        else:
            default_exclude = []
            self.options.dir_exclude_dirs.remove('no-defaults')


        # if there are specific directories to exclude, do not recurse them
        if self.options.dir_exclude_dirs != ['']:
            exclude_list = [os.path.join(self.options.inputdir, i).strip("/") for i in default_exclude + self.options.dir_exclude_dirs]
        # if no specific directories are excluded, use default-list (created above)
        else:
            exclude_list = [os.path.join(self.options.inputdir, i).strip("/") for i in default_exclude]

        # if NOT running with -f flag for forensic mode, exclude everything in /Volumes/* to prevent recursion of mounted volumes IN ADDITION to other exclusions.
        if not self.options.forensic_mode:
            exclude_list += [i for i in glob.glob(os.path.join(self.options.inputdir, 'Volumes/*'))]
            exclude_list = multiglob(self.options.inputdir, exclude_list)
        else:
            exclude_list = multiglob('/', exclude_list)

        self.log.debug("The following directories will be excluded from dirlist enumeration: {0}".format(exclude_list))

        filePool = ThreadPool(4)
        for i in root_list:
            for root, dirs, files in os.walk(i, topdown=True):

                # prune excluded directories and files to prevent further recursion into them
                dirs[:] = [d for d in dirs if os.path.join(root,d) not in exclude_list]
                files[:] = [f for f in files if os.path.join(root,f) not in exclude_list]

                # do not recurse into bundles that end with any of the file extensions below UNLESS told to at amtc runtime
                exc_bundles = ('.app', '.framework','.lproj','.plugin','.kext','.osax','.bundle','.driver','.wdgt')
                if root.strip().endswith(exc_bundles) and not os.path.basename(root).startswith('.') and not self.options.dir_recurse_bundles:
                    dirs[:] = []
                    files[:] = []

                if self.options.dir_no_multithreading:
                    file_data = [self._handle_files(root, file_item) for file_item in files]
                else:
                    file_data = self._filePooler(root, filePool, files)

                for record in file_data:
                    wf = record['wherefrom_1']
                    if wf != ['']:
                        try:
                            parsed_wf = read_stream_bplist(wf)
                            parsed_wf_utf8 = [str(a.encode('utf-8')) for a in parsed_wf if a != ""]
                        except Exception:
                            pathname = os.path.join(record['path'], record['name'])
                            parsed_wf_utf8 = ['ERROR']
                            self.log.debug("Could not parse embedded binary plist for kMDItemWhereFroms data from file {0}. {1}".format(pathname, [traceback.format_exc()]))

                        # keep the first wherefrom value and, if present, the second
                        if len(parsed_wf_utf8) > 0:
                            record['wherefrom_1'] = parsed_wf_utf8[0]
                            if len(parsed_wf_utf8) > 1:
                                record['wherefrom_2'] = parsed_wf_utf8[1]
                        else:
                            record['wherefrom_1'] = ''
                    else:
                        record['wherefrom_1'] = ''

                    output.write_entry(record.values())

                # bundles that will be code-sig checked
                check_signatures_bundles = ('.app','.kext','.osax')
                for name in dirs:
                    self._counter += 1
                    if not self.options.quiet:
                        if self.options.debug:
                            sys.stdout.write('dirlist        : INFO     Wrote %d lines in %s | FileName: %s \033[K\r' % (self._counter, datetime.now(pytz.UTC)-self.options.start_time, name))
                        else:
                            sys.stdout.write('dirlist        : INFO     Wrote %d lines in %s \r' % (self._counter, datetime.now(pytz.UTC)-self.options.start_time))
                        sys.stdout.flush()

                    # get timestamps and metadata for each file
                    record = OrderedDict((h, '') for h in self._headers)
                    stat_data = stats2(os.path.join(root, name))
                    record.update(stat_data)

                    # if the directory is a bundle ending in one of the extensions above, check its code signatures
                    if self.options.dir_no_code_signatures is False and name.endswith(check_signatures_bundles) and not name.startswith('.'):  # meaning DO process code signatures
                        record['code_signatures'] = str(get_codesignatures(os.path.join(root, name)))

                    output.write_entry(record.values())

        filePool.close()
        filePool.join()

        if not self.options.quiet:
            sys.stdout.write('\n')
            sys.stdout.flush()
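
The DataWriter class constructed at the top of this run() method (and again in the core_analytics run() further down) is also external to these examples. From the way it is used (built with the module name, headers, logger, run id and options, then fed one record at a time via write_entry), a minimal CSV-only sketch might look like this; the output path and file naming are assumptions.

    import csv
    import os


    class DataWriter(object):
        # Assumed helper: write a header row once, then append each record
        # passed to write_entry as a row of the module's output CSV.
        def __init__(self, module_name, headers, log, run_id, options):
            self.log = log
            outfile = os.path.join(options.outputdir,
                                   '{0}-{1}.csv'.format(module_name, run_id))
            self._fh = open(outfile, 'wb')
            self._writer = csv.writer(self._fh)
            self._writer.writerow(headers)

        def write_entry(self, values):
            self._writer.writerow(list(values))
            self._fh.flush()
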
Example #12
    def _parse_loginitems(self):
        user_loginitems_plist = multiglob(
            self.options.inputdir,
            ['Users/*/Library/Preferences/com.apple.loginitems.plist'])

        for i in user_loginitems_plist:
            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "login_items"

            try:
                p = plistlib.readPlist(i)
            except Exception:
                try:
                    p = read_bplist(i)
                except Exception:
                    self.log.debug('Could not read plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    p = 'ERROR'

            if p != 'ERROR':
                items = p[0]['SessionItems']['CustomListItems']
                for i in items:
                    record['prog_name'] = i['Name']
                    if 'Alias' in i:
                        try:
                            alias_bin = i['Alias']
                        except Exception:
                            alias_bin = 'ERROR'

                        if alias_bin != 'ERROR':
                            # scan the alias blob for a length-prefixed path string
                            c = [byte.encode('hex') for byte in alias_bin]
                            for idx in range(len(c)):
                                l = int(c[idx], 16)
                                if l < len(c) and l > 2:
                                    test = os.path.join(
                                        self.options.inputdir, (''.join(
                                            c[idx + 1:idx + l + 1])).decode('hex'))
                                    try:
                                        if not os.path.exists(test):
                                            continue
                                        else:
                                            record['program'] = test
                                            cs_check_path = os.path.join(
                                                self.options.inputdir,
                                                test.lstrip('/'))
                                            record['code_signatures'] = str(
                                                get_codesignatures(
                                                    cs_check_path,
                                                    self.options.
                                                    dir_no_code_signatures))

                                    except Exception:
                                        record['program'] = 'ERROR'
                                        record['code_signatures'] = 'ERROR'
                                        continue

                    elif 'Bookmark' in i:
                        try:
                            bookmark_bin = i['Bookmark']
                        except Exception:
                            bookmark_bin = 'ERROR'

                        if bookmark_bin != 'ERROR':
                            program = [i.encode('hex') for i in bookmark_bin]
                            data = Bookmark.from_bytes(
                                ''.join(program).decode('hex'))
                            d = data.get(0xf081, default=None)
                            d = ast.literal_eval(str(d).replace('Data', ''))
                            if d is not None:
                                prog = d.split(';')[-1].replace('\x00', '')
                                record['program'] = prog
                                cs_check_path = os.path.join(
                                    self.options.inputdir, prog.lstrip('/'))
                                record['code_signatures'] = str(
                                    get_codesignatures(
                                        cs_check_path,
                                        self.options.dir_no_code_signatures))

                    self._output.write_entry(record.values())
            else:
                errors = {
                    k: 'ERROR-CNR-PLIST'
                    for k, v in record.items() if v == ''
                }
                record.update(errors)
                self._output.write_entry(record.values())
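
The Alias handling above is dense: the blob is hex-encoded one byte at a time, then every offset is tried as a length byte, and the following bytes are decoded and tested as a path under inputdir. A toy illustration of the same scan on a made-up blob (Python 2, matching the encode('hex') idiom used above):

    # hypothetical alias-style blob embedding the path "/tmp" after a length byte
    blob = '\x00\x04/tmp\x00'
    c = [ch.encode('hex') for ch in blob]
    for idx in range(len(c)):
        length = int(c[idx], 16)
        if 2 < length < len(c):
            candidate = (''.join(c[idx + 1:idx + 1 + length])).decode('hex')
            print(candidate)  # prints "/tmp" when idx == 1; the real code then
                              # checks os.path.exists() before recording it
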
Example #13
    def _parse_LaunchAgentsDaemons(self):
        LaunchAgents = multiglob(self.options.inputdir, [
            'System/Library/LaunchAgents/*.plist',
            'Library/LaunchAgents/*.plist',
            'Users/*/Library/LaunchAgents/*.plist',
            'System/Library/LaunchAgents/.*.plist',
            'Library/LaunchAgents/.*.plist',
            'Users/*/Library/LaunchAgents/.*.plist'
        ])
        LaunchDaemons = multiglob(self.options.inputdir, [
            'System/Library/LaunchDaemons/*.plist',
            'Library/LaunchDaemons/*.plist',
            'System/Library/LaunchDaemons/.*.plist',
            'Library/LaunchDaemons/.*.plist'
        ])

        for i in LaunchDaemons + LaunchAgents:

            record = OrderedDict((h, '') for h in self._headers)
            metadata = stats2(i, oMACB=True)
            record.update(metadata)
            record['src_file'] = i
            record['src_name'] = "launch_items"

            try:
                p = plistlib.readPlist(i)
            except Exception:
                try:
                    p = read_bplist(i)
                except Exception:
                    self.log.debug('Could not read plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    p = 'ERROR'

            if p != 'ERROR':
                if type(p) is list and len(p) > 0:
                    p = p[0]

                # Try to get Label from each plist.
                try:
                    record['prog_name'] = p['Label']
                except KeyError:
                    self.log.debug(
                        "Cannot extract 'Label' from plist: {0}".format(i))
                    record['prog_name'] = 'ERROR'

                # Try to get ProgramArguments if present, or Program, from each plist.
                try:
                    prog_args = p['ProgramArguments']
                    program = p['ProgramArguments'][0]
                    record['program'] = program

                    if len(prog_args) > 1:
                        record['args'] = ' '.join(p['ProgramArguments'][1:])
                except (KeyError, IndexError):
                    try:
                        program = p['Program']
                        record['program'] = program
                    except Exception:
                        self.log.debug(
                            "Cannot extract 'Program' or 'ProgramArguments' from plist: {0}"
                            .format(i))
                        program = None
                        record['program'] = 'ERROR'
                        record['args'] = 'ERROR'
                except Exception:
                    self.log.debug('Could not parse plist {0}: {1}'.format(
                        i, [traceback.format_exc()]))
                    program = None

                # If program is ID'd, run additional checks.
                if program:
                    cs_check_path = os.path.join(self.options.inputdir,
                                                 program.lstrip('/'))
                    record['code_signatures'] = str(
                        get_codesignatures(
                            cs_check_path,
                            self.options.dir_no_code_signatures))

                    hashset = self._get_hashes(program)
                    record['sha256'] = hashset['sha256']
                    record['md5'] = hashset['md5']

            else:
                errors = {
                    k: 'ERROR-CNR-PLIST'
                    for k, v in record.items() if v == ''
                }
                record.update(errors)

            self._output.write_entry(record.values())
Example #14
    def run(self):
        output = DataWriter(self.module_name(), self._headers, self.log, self.run_id, self.options)

        if self.options.os_version is not None:
            ver = float('.'.join(self.options.os_version.split('.')[1:]))
            if ver < 13:
                self.log.error("Artifacts are not present below OS version 10.13.")
                return
        else:
            ver = None
            self.log.debug("OSVersion not detected, but going to try to parse anyway.")

        analytics_location = multiglob(self.options.inputdir, [
            'Library/Logs/DiagnosticReports/Analytics*.core_analytics',
            'Library/Logs/DiagnosticReports/Retired/Analytics*.core_analytics'])

        if len(analytics_location) < 1:
            self.log.debug("No .core_analytics files found.")
        else:
            self.log.debug("Found {0} .core_analytics files to parse.".format(len(analytics_location)))

        counter = 0
        for file in analytics_location:
            data = open(file, 'r').read()
            data_lines = [json.loads(i) for i in data.split('\n') if i.startswith("{\"message\":")]

            try:
                diag_start = [json.loads(i) for i in data.split('\n') if
                              i.startswith("{\"_marker\":") and "end-of-file"
                              not in i][0]['startTimestamp']
            except ValueError:
                diag_start = "ERROR"

            try:
                diag_end = [json.loads(i) for i in data.split('\n') if
                            i.startswith("{\"timestamp\":")][0]['timestamp']
                diag_end = str(parser.parse(diag_end).astimezone(pytz.utc))
                diag_end = diag_end.replace(' ', 'T').replace('+00:00', 'Z')
            except ValueError:
                diag_end = "ERROR"

            for i in data_lines:
                record = OrderedDict((h, '') for h in self._headers)
                record['src_report'] = file
                record['diag_start'] = diag_start
                record['diag_end'] = diag_end
                record['name'] = i['name']
                record['uuid'] = i['uuid']

                # If any fields not currently recorded (based on the headers above) appear,
                # they will be added to overflow.
                record['overflow'] = {}

                for k, v in i['message'].items():
                    if k in record.keys():
                        record[k] = i['message'][k]
                    else:
                        record['overflow'].update({k: v})

                if len(record['overflow']) == 0:
                    record['overflow'] = ''

                if record['uptime'] != '':
                    record['uptime_parsed'] = time.strftime("%H:%M:%S",
                                                            time.gmtime(record['uptime']))

                if record['activeTime'] != '':
                    record['activeTime_parsed'] = time.strftime("%H:%M:%S",
                                                                time.gmtime(record['activeTime']))

                if record['powerTime'] != '':
                    record['powerTime_parsed'] = time.strftime("%H:%M:%S",
                                                               time.gmtime(record['powerTime']))

                if record['appDescription'] != '':
                    record['appName'] = record['appDescription'].split(' ||| ')[0]
                    record['appVersion'] = record['appDescription'].split(' ||| ')[1]

                line = record.values()
                output.write_entry(line)
                counter += 1

        # Parse aggregate files from their directory on disk.
        agg_location = glob.glob(os.path.join(self.options.inputdir,'private/var/db/analyticsd/aggregates/4d7c9e4a-8c8c-4971-bce3-09d38d078849'))

        if ver is not None and ver > 13.6:
            self.log.debug("Cannot currently parse aggregate file above OS version 10.13.6.")
            return

        if len(agg_location) < 1:
            self.log.debug("No aggregate files found.")
        else:
            self.log.debug("Found {0} aggregate files to parse.".format(len(agg_location)))

        for aggregate in agg_location:
            data = open(aggregate, 'r').read()
            obj_list = data.split('\n')

            if len(obj_list) > 1:
                obj = [i for i in obj_list if '[[[' in i][0]
                try:
                    data_lines = json.loads(obj)
                except ValueError:
                    try:
                        data_lines = json.loads(json.dumps(list(ast.literal_eval(obj))))
                    except Exception:
                        data_lines = []
                        self.log.debug("Could not parse aggregate file: {0}.".format([traceback.format_exc()]))
                except Exception:
                    data_lines = []
                    self.log.debug("Could not parse aggregate file: {0}.".format([traceback.format_exc()]))

            elif len(obj_list) == 1:
                obj = obj_list[0]
                try:
                    data_lines = json.loads(obj)
                except ValueError:
                    try:
                        data_lines = json.loads(json.dumps(list(ast.literal_eval(obj))))
                    except Exception:
                        data_lines = []
                        self.log.debug("Could not parse aggregate file: {0}.".format([traceback.format_exc()]))
                except Exception:
                    data_lines = []
                    self.log.debug("Could not parse aggregate file: {0}.".format([traceback.format_exc()]))

            else:
                data_lines = []
                self.log.debug("Could not parse aggregate file. File had unusual number of objects to parse: {0}. | {1}".format(str(len(obj_list)), [traceback.format_exc()]))


            diag_start = stats2(aggregate)['btime']
            diag_end = stats2(aggregate)['mtime']

            raw = [i for i in data_lines if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)]
            for i in raw:
                record = OrderedDict((h, '') for h in self._headers)

                record['src_report'] = aggregate
                record['diag_start'] = diag_start
                record['diag_end'] = diag_end
                record['uuid'] = os.path.basename(aggregate)
                record['processName'] = i[0][0]

                record['appDescription'] = i[0][1]
                if record['appDescription'] != '':
                    record['appName'] = record['appDescription'].split(' ||| ')[0]
                    record['appVersion'] = record['appDescription'].split(' ||| ')[1]

                record['foreground'] = i[0][2]

                record['uptime'] = i[1][0]
                record['uptime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][0]))

                record['activeTime'] = i[1][1]
                record['activeTime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][1]))

                record['launches'] = i[1][2]
                record['idleTimeouts'] = i[1][3]
                record['activations'] = i[1][4]
                record['activityPeriods'] = i[1][5]

                record['powerTime'] = i[1][6]
                record['powerTime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][6]))

                line = record.values()
                output.write_entry(line)
                counter += 1

        if counter > 0:
            self.log.debug("Done. Wrote {0} lines.".format(counter))
Example #15
    def _parse_sfl2s(self):
        sfl2_list = multiglob(self.options.inputdir, [
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*.sfl2',
            'Users/*/Library/Application Support/com.apple.sharedfilelist/*/*.sfl2'
        ])

        for mru_file in sfl2_list:
            plist_objects = ccl_bplist.deserialise_NsKeyedArchiver(
                ccl_bplist.load(open(mru_file, "rb")),
                parse_whole_structure=True)

            try:
                if plist_objects["root"]["NS.objects"][1]["NS.keys"][
                        0] == "com.apple.LSSharedFileList.MaxAmount":
                    numberOfItems = plist_objects["root"]["NS.objects"][1][
                        "NS.objects"][0]
            except Exception:
                pass

            try:
                if plist_objects["root"]["NS.keys"][0] == "items":
                    items = plist_objects["root"]["NS.objects"][0][
                        "NS.objects"]
            except Exception:
                self.log.debug('Could not parse SFL {0}: {1}'.format(
                    mru_file, [traceback.format_exc()]))
                items = None

            if items:
                for n, item in enumerate(items):
                    record = OrderedDict((h, '') for h in self._headers)
                    record['src_file'] = mru_file
                    record['src_name'] = "SharedFileList"

                    try:

                        attribute_keys = plist_objects["root"]["NS.objects"][
                            0]["NS.objects"][n]["NS.keys"]
                        attribute_values = plist_objects["root"]["NS.objects"][
                            0]["NS.objects"][n]["NS.objects"]
                        attributes = dict(zip(attribute_keys,
                                              attribute_values))

                        try:
                            name = str(attributes['Name']).encode('utf-8')
                        except Exception:
                            name = ''

                        if 'Bookmark' in attributes:
                            try:
                                url = [
                                    'file://' + x.split(';')[-1] for x in
                                    attributes['Bookmark'].split('\x00')
                                    if x != '' and ';' in x
                                ][0]
                            except Exception:
                                try:
                                    url = ', '.join([
                                        'file://' + x.split(';')[-1] for x in [
                                            x for x in attributes['Bookmark']
                                            ['NS.data'].split('\x00')
                                            if x != '' and ';' in x
                                        ]
                                    ])
                                except Exception:
                                    try:
                                        url = [
                                            x for x in attributes['Bookmark'].
                                            split('\x00')
                                            if x != '' and x.startswith('x')
                                        ][0]
                                    except Exception:
                                        url = 'ERROR-COULDNOTPARSE'
                        else:
                            url = 'ERROR-NODATA'

                        record['item_index'] = str(n)
                        record['name'] = name
                        record['url'] = url

                    except Exception:
                        self.log.debug(
                            "Could not parse SFL item: {0}".format(item))

                    self._output.write_entry(record.values())