Example #1
def module(safari_location):
    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver >= 14.0 and forensic_mode is not True:
            log.error("Artifacts are inaccessible on and above OS version 10.14 on live systems.")
            return
    else:
        if forensic_mode is not True:
            log.debug("OSVersion not detected, but going to try to parse anyway.")
        else:
            log.error("OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems.")
            return

    history_headers = ['user', 'visit_time', 'title', 'url', 'visit_count', 'last_visit_time', 'recently_closed', 'tab_title', 'date_closed']
    history_output = data_writer('browser_safari_history', history_headers)

    downloads_headers = ['user', 'download_url', 'download_path', 'download_started', 'download_finished', 'download_totalbytes', 'download_bytes_received']
    downloads_output = data_writer('browser_safari_downloads', downloads_headers)

    extensions_headers = ['user', 'name', 'bundle_directory', 'enabled', 'apple_signed', 'developer_id', 'bundle_id', 'ctime', 'mtime', 'atime', 'size']
    extensions_output = data_writer('browser_safari_extensions', extensions_headers)

    for c in safari_location:
        userpath = c.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        log.debug("Starting parsing for Safari under {0} user.".format(user))

        if not os.path.exists(os.path.join(c, 'History.db')):
            log.debug("Did not find History.db under {0} user.".format(user))
            continue

        history_db = connect_to_db(os.path.join(c, 'History.db'), 'history_visits')
        recently_closed_plist = os.path.join(c, 'RecentlyClosedTabs.plist')
        if history_db:
            pull_visit_history(recently_closed_plist, history_db, user, history_output, history_headers)

        downloads_plist = os.path.join(c, 'Downloads.plist')
        pull_download_history(downloads_plist, user, downloads_output, downloads_headers)

        extensions = os.path.join(c, 'Extensions')
        if os.path.exists(extensions):
            pull_extensions(extensions, user, extensions_output, extensions_headers)
        else:
            log.debug("No extensions folder found. Skipping.")

        try:
            os.remove(os.path.join(outputdir, 'History.db-tmp'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-shm'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-wal'))
        except OSError:
            pass
    history_output.flush_record()
    downloads_output.flush_record()
    extensions_output.flush_record()
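
The version gate and the user-name lookup in Example #1 both lean on compact string tricks. A minimal, self-contained sketch of each (the OSVersion value and the sample path below are illustrative, not taken from a real system):

# OSVersion parsing: drop the leading "10" and join the remainder,
# so "10.14.6" becomes the float 14.6. Minor and patch collapse into
# one number (10.4.11 would become 4.11), which is acceptable for the
# 10.13/10.14-era comparisons these modules make.
OSVersion = "10.14.6"
ver = float('.'.join(OSVersion.split('.')[1:]))
assert ver == 14.6

# User extraction: find the component after the *last* 'Users' (or
# 'var') element. Reversing the list makes .index() search from the
# end, and the arithmetic maps that back to a forward index, plus one.
userpath = '/Users/alice/Library/Safari'.split('/')
userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
assert userpath[userindex] == 'alice'
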
Example #2
def module(safari_location):
    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver >= 14.0 and forensic_mode is not True:
            log.error(
                "Artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return
    else:
        if forensic_mode is not True:
            log.debug(
                "OSVersion not detected, but going to try to parse anyway.")
        else:
            log.error(
                "OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return

    history_headers = [
        'user', 'visit_time', 'title', 'url', 'visit_count', 'last_visit_time',
        'recently_closed', 'tab_title', 'date_closed'
    ]
    history_output = data_writer('browser_safari_history', history_headers)

    downloads_headers = [
        'user', 'download_url', 'download_path', 'download_started',
        'download_finished', 'download_totalbytes', 'download_bytes_received'
    ]
    downloads_output = data_writer('browser_safari_downloads',
                                   downloads_headers)

    for c in safari_location:
        userpath = c.split('/')
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        log.debug("Starting parsing for Safari under {0} user.".format(user))

        history_db = connect_to_db(os.path.join(c, 'History.db'),
                                   'history_visits')
        recently_closed_plist = os.path.join(c, 'RecentlyClosedTabs.plist')
        if history_db:
            pull_visit_history(recently_closed_plist, history_db, user,
                               history_output, history_headers)

        downloads_plist = os.path.join(c, 'Downloads.plist')
        pull_download_history(downloads_plist, user, downloads_output,
                              downloads_headers)

        try:
            os.remove(os.path.join(outputdir, 'History.db-tmp'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-shm'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-wal'))
        except OSError:
            pass
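
connect_to_db is not defined in these snippets. Judging from its call sites (a database path plus a table name, with a falsy return on failure) and from the History.db-tmp* files the modules clean up afterwards, a minimal stand-in might copy the database before opening it, roughly as below; the outputdir parameter and the '-tmp' naming are assumptions, not the tool's confirmed implementation.

import os
import shutil
import sqlite3

def connect_to_db(db_path, table, outputdir='.'):
    # Hypothetical sketch of the helper used above, inferred from its
    # call sites; the real implementation may differ.
    if not os.path.exists(db_path):
        return None
    # Copy the database so a live, possibly WAL-locked file is never
    # opened in place -- consistent with the '-tmp' files deleted later.
    tmp_copy = os.path.join(outputdir, os.path.basename(db_path) + '-tmp')
    shutil.copyfile(db_path, tmp_copy)
    conn = sqlite3.connect(tmp_copy)
    try:
        # Confirm the expected table is present before handing back
        # the connection.
        conn.execute('SELECT 1 FROM {0} LIMIT 1'.format(table))
    except sqlite3.Error:
        conn.close()
        return None
    return conn
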
Example #3
def module():
    headers = [
        'uid', 'path', 'name', 'last_hit_date', 'hit_count',
        'file_last_modified', 'generator', 'file_size'
    ]
    output = data_writer(_modName, headers)

    q_loc = os.path.join(
        inputdir,
        'private/var/folders/*/*/C/com.apple.QuickLook.thumbnailcache/index.sqlite'
    )
    qlist = glob.glob(q_loc)

    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver >= 14.0 and forensic_mode is not True:
            log.error(
                "Artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return
    else:
        if forensic_mode is not True:
            log.debug(
                "OSVersion not detected, but going to try to parse anyway.")
        else:
            log.error(
                "OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return

    if len(qlist) == 0:
        log.debug("Files not found in: {0}".format(q_loc))

    ql_sql = 'SELECT distinct k.folder, k.file_name, t.hit_count, t.last_hit_date, k.version \
              FROM (SELECT rowid AS f_rowid,folder,file_name,version FROM files) k \
              LEFT JOIN thumbnails t ON t.file_id = k.f_rowid ORDER BY t.hit_count DESC'

    for qfile in qlist:

        uid = stats2(qfile)['uid']

        data = query_db(qfile, ql_sql, outputdir)

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in headers)
            record['uid'] = uid
            record['path'] = item[0].encode('utf-8')
            record['name'] = item[1].encode('utf-8')

            if item[3]:
                record['last_hit_date'] = cocoa_time(item[3])
            else:
                record['last_hit_date'] = ''

            if item[2]:
                record['hit_count'] = item[2]
            else:
                record['hit_count'] = ''

            try:
                plist_array = read_stream_bplist(item[4])
                record['file_last_modified'] = cocoa_time(plist_array['date'])
                record['generator'] = plist_array['gen']
                try:
                    record['file_size'] = int(plist_array['size'])
                except KeyError:
                    record['file_size'] = 'Error'
            except Exception as e:
                log.error("Could not parse embedded binary plist for record {0}: {1}".format(record['name'], e))

            output.write_entry(record.values())
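
cocoa_time is likewise not shown here. The QuickLook thumbnail cache stores timestamps as seconds since the Cocoa epoch, 2001-01-01 00:00:00 UTC, so a minimal version of the conversion could look like the sketch below; the exact output format of the real helper is an assumption.

from datetime import datetime, timedelta

def cocoa_time(seconds):
    # Convert seconds since the Cocoa epoch (2001-01-01 00:00:00 UTC)
    # to an ISO-8601 string; sketch only, output format assumed.
    if not seconds:
        return ''
    return (datetime(2001, 1, 1) + timedelta(seconds=seconds)).strftime('%Y-%m-%dT%H:%M:%SZ')

# cocoa_time(600000000) -> '2020-01-06T10:40:00Z'
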
Example #4
def module():

    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver < 13:
            log.error("Artifact is not present below OS version 10.13.")
            return
        if ver >= 15:
            # log.warning("Artifact contents and information have changed for macOS 10.15+ - Experimental parsing available, see module file.")
            log.error("Artifact contents and information have changed for macOS 10.15+")
            # return parseCatalina() # WARN: EXPERIMENTAL
            return
    else:
        ver = None
        log.debug("OSVersion not detected, but going to try to parse anyway.")

    headers = ['src_report', 'diag_start', 'diag_end', 'name', 'uuid', 'processName',
               'appDescription', 'appName', 'appVersion', 'foreground', 'uptime',
               'uptime_parsed', 'powerTime', 'powerTime_parsed', 'activeTime', 'activeTime_parsed',
               'activations', 'launches', 'activityPeriods', 'idleTimeouts', 'Uptime',
               'Count', 'version', 'identifier', 'overflow']

    output = data_writer(_modName, headers)

    analytics_location = multiglob(inputdir, ['Library/Logs/DiagnosticReports/Analytics*.core_analytics',
                                              'Library/Logs/DiagnosticReports/Retired/Analytics*.core_analytics'])

    if len(analytics_location) < 1:
        log.debug("No .core_analytics files found.")
    else:
        log.debug("Found {0} .core_analytics files to parse.".format(len(analytics_location)))

    counter = 0
    for file in analytics_location:
        with open(file, 'r') as f:
            data = f.read()
        data_lines = [json.loads(i) for i in data.split('\n') if i.startswith("{\"message\":")]

        try:
            diag_start = [json.loads(i) for i in data.split('\n') if
                          i.startswith("{\"_marker\":") and "end-of-file"
                          not in i][0]['startTimestamp']
        except ValueError:
            diag_start = "ERROR"

        try:
            diag_end = [json.loads(i) for i in data.split('\n') if
                        i.startswith("{\"timestamp\":")][0]['timestamp']
            diag_end = str(parser.parse(diag_end).astimezone(timezone.utc))
            diag_end = diag_end.replace(' ', 'T').replace('+00:00', 'Z')
        except ValueError:
            diag_end = "ERROR"

        for i in data_lines:
            record = OrderedDict((h, '') for h in headers)
            record['src_report'] = file
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['name'] = i['name']
            record['uuid'] = i['uuid']

            # If any fields not currently recorded (based on the headers above) appear,
            # they will be added to overflow.
            record['overflow'] = {}

            for k, v in i['message'].items():
                if k in record.keys():
                    record[k] = i['message'][k]
                else:
                    record['overflow'].update({k: v})

            if len(record['overflow']) == 0:
                record['overflow'] = ''

            if record['uptime'] != '':
                record['uptime_parsed'] = time.strftime("%H:%M:%S",
                                                        time.gmtime(record['uptime']))

            if record['activeTime'] != '':
                record['activeTime_parsed'] = time.strftime("%H:%M:%S",
                                                            time.gmtime(record['activeTime']))

            if record['powerTime'] != '':
                record['powerTime_parsed'] = time.strftime("%H:%M:%S",
                                                           time.gmtime(record['powerTime']))

            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(' ||| ')[1]

            output.write_record(record)
            counter += 1

    # Parse aggregate files from their directory on disk.
    agg_location = glob.glob(os.path.join(inputdir,'private/var/db/analyticsd/aggregates/4d7c9e4a-8c8c-4971-bce3-09d38d078849'))

    if ver is not None and ver > 13.6:
        log.debug("Cannot currently parse aggregate file above OS version 10.13.6.")
        return

    if len(agg_location) < 1:
        log.debug("No aggregate files found.")
    else:
        log.debug("Found {0} aggregate files to parse.".format(len(agg_location)))

    for aggregate in agg_location:
        with open(aggregate, 'r') as f:
            data = f.read()
        obj_list = data.split('\n')

        if len(obj_list) > 1:
            obj = [i for i in obj_list if '[[[' in i][0]
        elif len(obj_list) == 1:
            obj = obj_list[0]
        else:
            obj = None
            data_lines = []
            log.debug("Could not parse aggregate file. File had unusual number of objects to parse: {0}.".format(str(len(obj_list))))

        if obj is not None:
            try:
                data_lines = json.loads(obj)
            except ValueError:
                try:
                    data_lines = json.loads(json.dumps(list(ast.literal_eval(obj))))
                except Exception:
                    data_lines = []
                    log.debug("Could not parse aggregate file: {0}.".format([traceback.format_exc()]))
            except Exception:
                data_lines = []
                log.debug("Could not parse aggregate file: {0}.".format([traceback.format_exc()]))

        diag_start = stats2(aggregate)['btime']
        diag_end = stats2(aggregate)['mtime']

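        # Each entry of interest is a pair: a 3-element list
        # (processName, appDescription, foreground) and a 7-element list
        # of counters (uptime, activeTime, launches, idleTimeouts,
        # activations, activityPeriods, powerTime), matching the
        # assignments below; everything else is filtered out.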
        raw = [i for i in data_lines if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)]
        for i in raw:
            record = OrderedDict((h, '') for h in headers)

            record['src_report'] = aggregate
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['uuid'] = os.path.basename(aggregate)
            record['processName'] = i[0][0]

            record['appDescription'] = i[0][1]
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(' ||| ')[1]

            record['foreground'] = i[0][2]

            record['uptime'] = i[1][0]
            record['uptime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][0]))

            record['activeTime'] = i[1][1]
            record['activeTime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][1]))

            record['launches'] = i[1][2]
            record['idleTimeouts'] = i[1][3]
            record['activations'] = i[1][4]
            record['activityPeriods'] = i[1][5]

            record['powerTime'] = i[1][6]
            record['powerTime_parsed'] = time.strftime("%H:%M:%S", time.gmtime(i[1][6]))

            output.write_record(record)
            counter += 1

    output.flush_record()
    if counter > 0:
        log.debug("Done. Wrote {0} lines.".format(counter))
Example #5
def module():

    ver = float('.'.join(OSVersion.split('.')[1:]))

    if ver < 13:
        log.error("Artifacts are not present below OS version 10.13.")
        return

    headers = [
        'src_report', 'diag_start', 'diag_end', 'name', 'uuid', 'processName',
        'appDescription', 'appName', 'appVersion', 'foreground', 'uptime',
        'uptime_parsed', 'powerTime', 'powerTime_parsed', 'activeTime',
        'activeTime_parsed', 'activations', 'launches', 'activityPeriods',
        'idleTimeouts', 'Uptime', 'Count', 'version', 'identifier', 'overflow'
    ]

    output = data_writer(_modName, headers)

    analytics_location = multiglob(inputdir, [
        'Library/Logs/DiagnosticReports/Analytics*.core_analytics',
        'Library/Logs/DiagnosticReports/Retired/Analytics*.core_analytics'
    ])

    if len(analytics_location) < 1:
        log.debug("No .core_analytics files found.")
    else:
        log.debug("Found {0} .core_analytics files to parse.".format(
            len(analytics_location)))

    counter = 0
    for file in analytics_location:
        with open(file, 'r') as f:
            data = f.read()
        data_lines = [
            json.loads(i) for i in data.split('\n')
            if i.startswith("{\"message\":")
        ]

        try:
            diag_start = [
                json.loads(i) for i in data.split('\n')
                if i.startswith("{\"_marker\":") and "end-of-file" not in i
            ][0]['startTimestamp']
        except ValueError:
            diag_start = "ERROR"

        try:
            diag_end = [
                json.loads(i) for i in data.split('\n')
                if i.startswith("{\"timestamp\":")
            ][0]['timestamp']
            diag_end = str(parser.parse(diag_end).astimezone(pytz.utc))
            diag_end = diag_end.replace(' ', 'T').replace('+00:00', 'Z')
        except ValueError:
            diag_end = "ERROR"

        for i in data_lines:
            record = OrderedDict((h, '') for h in headers)
            record['src_report'] = file
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['name'] = i['name']
            record['uuid'] = i['uuid']

            # If any fields not currently recorded (based on the headers above) appear,
            # they will be added to overflow.
            record['overflow'] = {}

            for k, v in i['message'].items():
                if k in record.keys():
                    record[k] = i['message'][k]
                else:
                    record['overflow'].update({k: v})

            if len(record['overflow']) == 0:
                record['overflow'] = ''

            if record['uptime'] != '':
                record['uptime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['uptime']))

            if record['activeTime'] != '':
                record['activeTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['activeTime']))

            if record['powerTime'] != '':
                record['powerTime_parsed'] = time.strftime(
                    "%H:%M:%S", time.gmtime(record['powerTime']))

            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(
                    ' ||| ')[1]

            line = record.values()
            output.write_entry(line)
            counter += 1

    # Parse aggregate files from their directory on disk.
    agg_location = glob.glob(
        os.path.join(
            inputdir,
            'private/var/db/analyticsd/aggregates/4d7c9e4a-8c8c-4971-bce3-09d38d078849'
        ))

    if ver > 13.6:
        log.debug(
            "Cannot currently parse aggregate file above OS version 10.13.6.")
        return

    if len(agg_location) < 1:
        log.debug("No aggregate files found.")
    else:
        log.debug("Found {0} aggregate files to parse.".format(
            len(agg_location)))

    for aggregate in agg_location:
        with open(aggregate, 'r') as f:
            data = f.read()
        try:
            data_lines = json.loads(data)
        except ValueError:
            data_lines = json.loads(json.dumps(list(ast.literal_eval(data))))

        diag_start = stats2(aggregate)['btime']
        diag_end = stats2(aggregate)['mtime']

        raw = [
            i for i in data_lines
            if len(i) == 2 and (len(i[0]) == 3 and len(i[1]) == 7)
        ]
        for i in raw:
            record = OrderedDict((h, '') for h in headers)

            record['src_report'] = aggregate
            record['diag_start'] = diag_start
            record['diag_end'] = diag_end
            record['uuid'] = os.path.basename(aggregate)
            record['processName'] = i[0][0]

            record['appDescription'] = i[0][1]
            if record['appDescription'] != '':
                record['appName'] = record['appDescription'].split(' ||| ')[0]
                record['appVersion'] = record['appDescription'].split(
                    ' ||| ')[1]

            record['foreground'] = i[0][2]

            record['uptime'] = i[1][0]
            record['uptime_parsed'] = time.strftime("%H:%M:%S",
                                                    time.gmtime(i[1][0]))

            record['activeTime'] = i[1][1]
            record['activeTime_parsed'] = time.strftime(
                "%H:%M:%S", time.gmtime(i[1][1]))

            record['launches'] = i[1][2]
            record['idleTimeouts'] = i[1][3]
            record['activations'] = i[1][4]
            record['activityPeriods'] = i[1][5]

            record['powerTime'] = i[1][6]
            record['powerTime_parsed'] = time.strftime("%H:%M:%S",
                                                       time.gmtime(i[1][6]))

            line = record.values()
            output.write_entry(line)
            counter += 1

    if counter > 0:
        log.debug("Done. Wrote {0} lines.".format(counter))