Example #1
def module(firefox_location):

    urls_headers = ['user','profile','visit_time','title','url','visit_count','last_visit_time','typed','description']
    urls_output = data_writer('browser_firefox_history', urls_headers)

    downloads_headers = ['user','profile','download_url','download_path','download_started','download_finished','download_totalbytes']
    downloads_output = data_writer('browser_firefox_downloads', downloads_headers)

    for c in firefox_location:
        userpath = c.split('/')
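        # Locate the last 'Users' component and take the next path element as the username.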
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        profileindex = userpath.index('Profiles') + 1
        profile = userpath[profileindex]

        log.debug("Starting parsing for Firefox under {0} user.".format(user))

        get_firefox_version(c)

        history_db = connect_to_db(os.path.join(c, 'places.sqlite'),'moz_places')

        # If the database cannot be accessed, or if the main table necessary 
        # for parsing (moz_places) is unavailable,
        # fail gracefully. 
        if history_db:
            pull_visit_history(history_db, user, profile, urls_output, urls_headers)
            pull_download_history(history_db, user, profile, downloads_output, downloads_headers)

        try:
            os.remove(os.path.join(outputdir, 'places.sqlite-tmp'))
            os.remove(os.path.join(outputdir, 'places.sqlite-tmp-shm'))
            os.remove(os.path.join(outputdir, 'places.sqlite-tmp-wal'))
        except OSError:
            pass
Example #2
def module(safari_location):
    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver > 14.0 and forensic_mode is not True:
            log.error("Artifacts are inaccessible on and above OS version 10.14 on live systems.")
            return
    else:
        if forensic_mode is not True:
            log.debug("OSVersion not detected, but going to try to parse anyway.")
        else:
            log.error("OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems.")
            return

    history_headers = ['user', 'visit_time', 'title', 'url', 'visit_count', 'last_visit_time', 'recently_closed', 'tab_title', 'date_closed']
    history_output = data_writer('browser_safari_history', history_headers)

    downloads_headers = ['user', 'download_url', 'download_path', 'download_started', 'download_finished', 'download_totalbytes', 'download_bytes_received']
    downloads_output = data_writer('browser_safari_downloads', downloads_headers)

    extensions_headers = ['user', 'name', 'bundle_directory', 'enabled', 'apple_signed', 'developer_id', 'bundle_id', 'ctime', 'mtime', 'atime', 'size']
    extensions_output = data_writer('browser_safari_extensions', extensions_headers)

    for c in safari_location:
        userpath = c.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        log.debug("Starting parsing for Safari under {0} user.".format(user))

        if not os.path.exists(os.path.join(c, 'History.db')):
            log.debug("Did not find History.db under {0} user.".format(user))
            continue

        history_db = connect_to_db(os.path.join(c, 'History.db'), 'history_visits')
        recently_closed_plist = os.path.join(c, 'RecentlyClosedTabs.plist')
        if history_db:
            pull_visit_history(recently_closed_plist, history_db, user, history_output, history_headers)

        downloads_plist = os.path.join(c, 'Downloads.plist')
        pull_download_history(downloads_plist, user, downloads_output, downloads_headers)

        extensions = os.path.join(c, 'Extensions')
        if os.path.exists(extensions):
            pull_extensions(extensions, user, extensions_output, extensions_headers)
        else:
            log.debug("No extensions folder found. Skipping.")

        try:
            os.remove(os.path.join(outputdir, 'History.db-tmp'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-shm'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-wal'))
        except OSError:
            pass
    history_output.flush_record()
    downloads_output.flush_record()
    extensions_output.flush_record()
Example #3
def module():
    firefox_locations = glob.glob(
        os.path.join(inputdir, 'Users/*/Library/Application Support/Firefox/Profiles/*.*'))

    urls_headers = ['user', 'profile', 'visit_time', 'title', 'url', 'visit_count', 'last_visit_time', 'typed', 'description']
    urls_output = data_writer('browser_firefox_history', urls_headers)

    downloads_headers = ['user', 'profile', 'download_url', 'download_path', 'download_started', 'download_finished', 'download_totalbytes']
    downloads_output = data_writer('browser_firefox_downloads', downloads_headers)

    extensions_headers = ['user', 'profile', 'name', 'id', 'creator', 'description', 'update_url', 'install_date', 'last_updated', 'source_uri', 'homepage_url']
    extensions_output = data_writer('browser_firefox_extensions', extensions_headers)

    for location in firefox_locations:
        user_path = location.split('/')
        user_index = len(user_path) - 1 - user_path[::-1].index('Users') + 1
        user = user_path[user_index]

        profile_index = user_path.index('Profiles') + 1
        profile = user_path[profile_index]

        log.debug("Starting parsing for Firefox under {0} user at {1}.".format(user, profile))

        get_firefox_version(location, user, profile)

        history_db_filepath = os.path.join(location, 'places.sqlite')
        if SQLiteDB.db_table_exists(history_db_filepath, 'moz_places'):
            try:
                parse_visit_history(location, user, profile, urls_output, urls_headers)
                parse_download_history(location, user, profile, downloads_output, downloads_headers)
            except Exception:
                log.error([traceback.format_exc()])
        else:
            log.debug("Did not find visit or download history for Firefox under {0} user at {1}.".format(user, profile))

        # Firefox Extensions
        extension_db_filepath = os.path.join(location, 'extensions.json')
        if os.path.exists(extension_db_filepath):
            try:
                get_extensions(extension_db_filepath, user, profile, extensions_output, extensions_headers)
            except Exception:
                log.error([traceback.format_exc()])
        else:
            log.debug("Did not find any Firefox extensions for Firefox under {0} user at {1}.".format(user, profile))

        # clean up
        for file in glob.glob(os.path.join(outputdir, '*places.sqlite*')):
            try:
                os.remove(file)
            except OSError as e:
                log.debug('Unable to clean up temp file {0}: '.format(file) + str(e))
                continue

    urls_output.flush_record()
    downloads_output.flush_record()
    extensions_output.flush_record()
Example #4
def module(safari_location):
    if OSVersion is not None:
        ver = float('.'.join(OSVersion.split('.')[1:]))
        if ver > 14.0 and forensic_mode is not True:
            log.error(
                "Artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return
    else:
        if forensic_mode is not True:
            log.debug(
                "OSVersion not detected, but going to try to parse anyway.")
        else:
            log.error(
                "OSVersion not detected, so will not risk parsing as artifacts are inaccessible on and above OS version 10.14 on live systems."
            )
            return

    history_headers = [
        'user', 'visit_time', 'title', 'url', 'visit_count', 'last_visit_time',
        'recently_closed', 'tab_title', 'date_closed'
    ]
    history_output = data_writer('browser_safari_history', history_headers)

    downloads_headers = [
        'user', 'download_url', 'download_path', 'download_started',
        'download_finished', 'download_totalbytes', 'download_bytes_received'
    ]
    downloads_output = data_writer('browser_safari_downloads',
                                   downloads_headers)

    for c in safari_location:
        userpath = c.split('/')
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        log.debug("Starting parsing for Safari under {0} user.".format(user))

        history_db = connect_to_db(os.path.join(c, 'History.db'),
                                   'history_visits')
        recently_closed_plist = os.path.join(c, 'RecentlyClosedTabs.plist')
        if history_db:
            pull_visit_history(recently_closed_plist, history_db, user,
                               history_output, history_headers)

        downloads_plist = os.path.join(c, 'Downloads.plist')
        pull_download_history(downloads_plist, user, downloads_output,
                              downloads_headers)

        try:
            os.remove(os.path.join(outputdir, 'History.db-tmp'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-shm'))
            os.remove(os.path.join(outputdir, 'History.db-tmp-wal'))
        except OSError:
            pass
Example #5
def module():
	if ("Volumes" not in inputdir and forensic_mode is False):
		headers = ['cmd', 'pid', 'ppid', 'user', 'file_descriptor', 'type', 'device', 'size', 'node', 'access', 'name']
		output = data_writer(_modName, headers)

		# encoding = locale.getpreferredencoding(True)
		names = OrderedDict(zip('cpRLftDsian', 'command pid ppid user fd type device_no size inode access name'.split()))

		lsof = Popen(["lsof", "-n", "-P", "-F{}0".format(''.join(names))], stdout=PIPE, bufsize=-1)

		for line in lsof.stdout:
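			# lsof -F output: each record is a run of NUL-separated fields, each field
			# prefixed by the single character requested above (see the `names` mapping).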
			try:
				fields = {f[:1].decode('ascii', 'strict'): f[1:].decode() for f in line.split(b'\0') if f.rstrip(b'\n')}
			except UnicodeDecodeError:
				fields = {f[:1].decode('ascii', 'strict'): f[1:] for f in line.split(b'\0') if f.rstrip(b'\n')}
			if 'p' in fields:
				process_info = fields
			elif 'f' in fields:
				fields.update(process_info)
				result = OrderedDict((name, fields.get(id)) for id, name in names.items())
				line = [v for k, v in result.items()]

				output.write_record(line)

		lsof.communicate()
		output.flush_record()
	else:
		log.error("Module did not run: input is not a live system!")
Example #6
def module():
	headers = ['tapID', 'tap_point', 'options', 'event_interest',
               'tapping_process', 'process_tapped', 'enabled']
	output = data_writer(_modName, headers)

	try:
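		# CGGetEventTapList(max_taps, tap_list, count): the None placeholders are
		# filled in by PyObjC, and the returned tuple's second element is the tap list.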
		temp = Quartz.CGGetEventTapList(10,None,None)
		taps = str(temp[1]).split(',')[1:]
		log.debug("Success. Captured event tap list.")
	except IOError:
		log.error("Couldn't grab event taps.")
		taps = []

	log.debug("Parsing event taps.")
	for i in range(len(taps)):
		events = taps[i].split(' ')
		record = OrderedDict((h, '') for h in headers)

		record['tapID'] = events[2].rsplit('=',1)[1]
		record['tap_point'] = events[3].rsplit('=',1)[1]
		record['options'] = events[4].rsplit('=',1)[1]
		record['event_interest'] = events[5].rsplit('=',1)[1]
		record['tapping_process'] = events[6].rsplit('=',1)[1]
		record['process_tapped'] = events[7].rsplit('=',1)[1]
		record['enabled'] = events[8].rsplit('=',1)[1]

		output.write_entry(record.values())

	log.debug("Done.")
Example #7
def moduleFirefox(firefox_cookies_location):
    log.debug("Parsing Firefox cookies...")

    cookies_headers = ['user','profile','host_key','name','value','path',
                        'creation_utc','expires_utc','last_access_utc','is_secure',
                        'is_httponly','browser_element',"same_site"]
    cookies_output = data_writer('browser_firefox_cookies', cookies_headers)

    for c in firefox_cookies_location:
        userpath = c.split('/')
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        profileindex = userpath.index('Profiles') + 1
        profile = userpath[profileindex]

        log.debug("Starting parsing for Firefox cookies under {0} user.".format(user))

        cookies_db = connect_to_db(os.path.join(c, 'cookies.sqlite'))

        if cookies_db:
            pull_firefox_cookies(cookies_db, user, profile, cookies_output, cookies_headers)
        try:
            os.remove(os.path.join(outputdir, 'cookies.sqlite-tmp'))
            os.remove(os.path.join(outputdir, 'cookies.sqlite-tmp-shm'))
            os.remove(os.path.join(outputdir, 'cookies.sqlite-tmp-wal'))
        except OSError:
            pass

    log.debug("Finished parsing Firefox cookies.")
Example #8
def module():
    headers = [
        'src_name', 'timestamp', 'display_name', 'display_version',
        'package_identifiers', 'process_name'
    ]
    output = data_writer(_modName, headers)

    installhistory_loc = os.path.join(inputdir,
                                      'Library/Receipts/InstallHistory.plist')
    installhistory_list = glob.glob(installhistory_loc)

    if len(installhistory_list) == 0:
        log.debug("File not found: {0}".format(installhistory_loc))

    for file in installhistory_list:
        installhistory = plistlib.readPlist(file)

        for i in range(len(installhistory)):
            record = OrderedDict((h, '') for h in headers)
            record['src_name'] = os.path.basename(file)
            record['timestamp'] = installhistory[i]['date'].strftime(
                '%Y-%m-%dT%H:%M:%SZ')
            record['display_version'] = installhistory[i]['displayVersion']
            record['display_name'] = installhistory[i]['displayName']
            record['package_identifiers'] = installhistory[i][
                'packageIdentifiers']
            record['process_name'] = installhistory[i]['processName']

            line = [
                x.encode('utf-8') if isinstance(x, unicode) else x
                for x in record.values()
            ]
            output.write_entry(line)
Example #9
def moduleChrome(chrome_cookies_location):
    log.debug("Parsing Chrome cookies...")

    # Generate list of all chrome profiles under all chrome directories
    full_list_raw = [multiglob(c, ['Default', 'Profile *', 'Guest Profile']) for c in chrome_cookies_location]
    full_list = list(itertools.chain.from_iterable(full_list_raw))

    # headers from db
    cookies_headers = ['user', 'profile', 'host_key', 'name', 'value', 'path',
                        'creation_utc', 'expires_utc', 'last_access_utc', 'is_secure',
                        'is_httponly', 'has_expires', 'is_persistent', 'priority',
                        'encrypted_value', 'samesite', 'source_scheme']
    cookies_output = data_writer('browser_cookies_chrome', cookies_headers)

    for profile in full_list:
        userpath = profile.split('/')
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        chromeindex = userpath.index('Chrome') + 1
        prof = userpath[chromeindex]


        log.debug("Starting parsing for Chrome cookies under {0} user.".format(user))

        get_chrome_version(os.path.join(profile, 'Cookies'))
        cookies_db = connect_to_db(os.path.join(profile, 'Cookies'))

        if cookies_db:
            pull_chrome_cookies(cookies_db, user, profile, cookies_output, cookies_headers)
        try:
            os.remove(os.path.join(outputdir, 'Cookies-tmp'))
        except OSError:
            pass
    log.debug("Finished parsing Chrome cookies.")
Example #10
def module():
    _headers = ['type', 'name', 'last_connected', 'security', 'hotspot']
    _output = data_writer(_modName, _headers)

    airport = plistlib.readPlist(os.path.join(inputdir, 'Library/Preferences/SystemConfiguration/com.apple.airport.preferences.plist'))
    interface = plistlib.readPlist(os.path.join(inputdir, 'Library/Preferences/SystemConfiguration/NetworkInterfaces.plist'))
    
    # KEEP THE LINE BELOW TO GENERATE AN ORDEREDDICT BASED ON THE HEADERS
    record = OrderedDict((h, '') for h in _headers)


    for i in airport['KnownNetworks']:
    	record['type'] = "Airport"
    	record['name'] = finditem(airport['KnownNetworks'][i], 'SSIDString')
    	record['last_connected'] = finditem(airport['KnownNetworks'][i], 'LastConnected')
    	record['security'] = finditem(airport['KnownNetworks'][i], 'SecurityType')
    	record['hotspot'] = finditem(airport['KnownNetworks'][i], 'PersonalHotspot')

    	line = record.values()
    	_output.write_entry(line)

    for i in interface['Interfaces']:
        record['type'] = finditem(i, 'BSD Name')
        record['name'] = finditem(i['SCNetworkInterfaceInfo'], 'UserDefinedName')
        record['last_connected'] = ''
        record['security'] = ''
        record['hotspot'] = ''

        line = record.values()
        _output.write_entry(line)
Example #11
def module():
    headers = ['user', 'shortcut', 'display_name', 'last_used', 'url']
    output = data_writer(_modName, headers)

    user_inputdir = glob.glob(os.path.join(inputdir, "Users/*"))
    user_inputdir.append(os.path.join(inputdir, "var/root"))

    spotlight_path = 'Library/Application Support/com.apple.spotlight.Shortcuts'
    for user_home in user_inputdir:

        sl_path = os.path.join(user_home, spotlight_path)
        u_spotlight = glob.glob(sl_path)
        if len(u_spotlight) == 0:
            log.debug("File not found: {0}".format(sl_path))

        for file in u_spotlight:
            try:
                spotlight_data = plistlib.readPlist(file)
                for k, v in spotlight_data.items():
                    user = os.path.basename(user_home)
                    shortcut = k
                    display_name = spotlight_data[k]['DISPLAY_NAME']
                    last_used = spotlight_data[k]['LAST_USED'].isoformat(
                    ) + "Z"
                    url = spotlight_data[k]['URL']

                    line_raw = [user, shortcut, display_name, last_used, url]
                    line = [x.encode('utf-8') for x in line_raw]

                    output.write_entry(line)

            except Exception as e:
                log.error("Could not parse: {0}".format(file))
Example #12
def module():
    headers = ['user', 'shortcut', 'display_name', 'last_used', 'url']
    output = data_writer(_modName, headers)

    user_inputdir = multiglob(inputdir, [
        'Users/*/Library/Application Support/com.apple.spotlight.Shortcuts',
        'private/var/*/Library/Application Support/com.apple.spotlight.Shortcuts'
    ])

    for file in user_inputdir:
        userpath = file.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        log.debug(
            "Going to parse Spotlight shortcuts under {0} user.".format(user))
        try:
            spotlight_data = plistlib.readPlist(file)
            for k, v in spotlight_data.items():
                shortcut = k
                display_name = spotlight_data[k]['DISPLAY_NAME']
                last_used = spotlight_data[k]['LAST_USED'].isoformat() + "Z"
                url = spotlight_data[k]['URL']

                line_raw = [user, shortcut, display_name, last_used, url]
                line = [x.encode('utf-8') for x in line_raw]

                output.write_entry(line)

        except Exception as e:
            log.error("Could not parse: {0}".format(file))
Example #13
def module():
	if ("Volumes" not in inputdir and forensic_mode is False):
		headers = ['pid', 'ppid', 'user', 'state', 'proc_start', 'runtime', 'cmd']
		output = data_writer(_modName, headers)

		os.environ['TZ'] = 'UTC0'
		ps_out, e = subprocess.Popen(["ps", "-Ao", "pid,ppid,user,stat,lstart,time,command"], stdout=subprocess.PIPE).communicate()

		if e:
			pass
		else:
			pslist = ps_out.decode('utf-8').split('\n')
			for p in pslist:
				if "PID" not in p and len(p) > 0:
					item = [x.lstrip(' ') for x in filter(None, p.split(' '))]
					pid = item[0]
					ppid = item[1]
					user = item[2]
					state = item[3]
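					# 'lstart' spans five whitespace-separated tokens (weekday month day time year);
					# the parse below skips the weekday and joins the remaining four.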
					proc_start = parser.parse(' '.join(item[5:9])).replace(tzinfo=None).isoformat() + 'Z'
					runtime = item[9]
					cmd = ' '.join(item[10:])

					line = [pid, ppid, user, state, proc_start, runtime, cmd]
					output.write_record(line)

		output.flush_record()
	else:
		log.error("Module did not run: input is not a live system!")
Example #14
def module():
    os.environ['TZ'] = 'UTC0'

    headers = [
        'src_file', 'timestamp', 'log_systemname', 'processname', 'pid',
        'message'
    ]
    output = data_writer(_modName, headers)

    asl_loc = os.path.join(inputdir, 'private/var/log/asl/*.asl')
    varlogasl_inputdir = glob.glob(asl_loc)

    if len(varlogasl_inputdir) == 0:
        log.debug("Files not found in: {0}".format(asl_loc))

    for asllog in varlogasl_inputdir:
        FNULL = open(os.devnull, 'w')
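        # Use the native 'syslog' utility to dump the binary ASL file as text with
        # UTC timestamps; stderr is folded into stdout so errors can be inspected.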
        asl_out, e = subprocess.Popen(["syslog", "-f", asllog, '-T', 'utc.3'],
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT).communicate()
        if "Invalid Data Store" in asl_out:
            log.debug(
                "Could not parse {0}. Invalid Data Store error reported - file may be corrupted."
                .format(asllog))
            continue
        if not e:
            oasllog = asl_out.split('\n')
            asl_parse(asllog, oasllog, headers, output)
        else:
            log.error("Could not parse ASL logs.")
Example #15
def module():
    headers = [
        'user', 'id', 'terminal_type', 'pid', 'logon_type', 'timestamp',
        'hostname'
    ]
    output = data_writer(_modName, headers)

    utmpx_path = glob.glob(os.path.join(inputdir, 'private/var/run/utmpx'))

    # This is a string version of the struct format
    # https://opensource.apple.com/source/Libc/Libc-1158.50.2/include/NetBSD/utmpx.h.auto.html
    # https://github.com/libyal/dtformats/blob/master/documentation/Utmp%20login%20records%20format.asciidoc
    # https://stackoverflow.com/questions/17244488/reading-struct-in-python-from-created-struct-in-c

    UTMPX_STR = "256s4s32sih2xii256s64x"
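    # Fields: 256s user, 4s id, 32s terminal, i pid, h logon type, 2x pad, i tv_sec, i tv_usec, 256s host, 64x reserved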
    UTMPX_STR_SIZE = calcsize(UTMPX_STR)
    UTMPX_BUFFER_SIZE = 628

    if len(utmpx_path) == 0:
        log.debug("File not found: {0}".format(utmpx_path))

    for path in utmpx_path:
        with open(path, 'rb') as file:
            # set the headers and write it out.
            record = OrderedDict((h, '') for h in headers)
            # Read out header section, but we'll discard it for now.
            header = file.read(UTMPX_BUFFER_SIZE)
            # print header
            # Loop through the rest of the records.
            # First record is always boot time.
            while True:
                buf = file.read(UTMPX_BUFFER_SIZE)
                if len(buf) != UTMPX_STR_SIZE:
                    break
                # Write out the fields
                user, id, terminal_type, pid, logon_code, epoch, usec, host_id = unpack_from(
                    UTMPX_STR, buf)
                # Combine the timestamp fields
                combo_time = datetime.datetime.utcfromtimestamp(
                    epoch) + datetime.timedelta(microseconds=usec)
                utc_combo = pytz.utc.localize(combo_time)
                timestamp_formatted = utc_combo.strftime(
                    '%Y-%m-%dT%H:%M:%S.%fZ')

                if host_id.rstrip('\x00') == '':
                    host = "localhost"
                else:
                    host = host_id.rstrip('\x00')

                # Convert them to an OrderedDict and then create Values View
                record['user'] = user.rstrip('\x00')
                record['id'] = id
                record['terminal_type'] = terminal_type.rstrip('\x00')
                record['pid'] = pid
                record['logon_type'] = decode_logon(logon_code)
                record['timestamp'] = timestamp_formatted
                record['hostname'] = host
                line = record.values()
                # print values
                output.write_entry(line)
Example #16
def module():
    _headers = [
        'mtime', 'atime', 'ctime', 'btime', 'src_file', 'user', 'item_index',
        'cmd'
    ]
    output = data_writer(_modName, _headers)

    user_inputdir = multiglob(inputdir, [
        'Users/*/.*_history',
        'Users/*/.bash_sessions/*',
        'private/var/*/.*_history',
        'private/var/*/.bash_sessions/*',
    ])

    # Generate debug messages indicating users with history files to be parsed.
    userlist = []
    for file in user_inputdir:
        userpath = file.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        userlist.append(user)

    for u in list(set(userlist)):
        log.debug(
            "Going to parse bash and other history under {0} user.".format(u))

    # Parse history files found.
    for file in user_inputdir:
        # Get username from path.
        userpath = file.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        # Parse the files.
        out = stats2(file)
        sess = open(file, 'r').readlines()
        indexer = 0
        for line in sess:
            record = OrderedDict((h, '') for h in _headers)

            for i in _headers:
                if i in out:
                    record[i] = out[i]
                    record['src_file'] = out['name']

            record['user'] = user
            record['cmd'] = line.rstrip()
            indexer += 1
            record['item_index'] = indexer
            output.write_entry(record.values())
Example #17
def module():
    # KEEP THIS - ENABLES WRITING OUTPUT FILE.
    _output = data_writer(_modName, _headers)

    # -------------BEGIN MODULE-SPECIFIC LOGIC------------- #
    preferences = plistlib.readPlist(os.path.join(inputdir, 'Library/Preferences/SystemConfiguration/preferences.plist'))
    systemversion = plistlib.readPlist(os.path.join(inputdir, 'System/Library/CoreServices/SystemVersion.plist'))

    # KEEP THE LINE BELOW TO GENERATE AN ORDEREDDICT BASED ON THE HEADERS
    record = OrderedDict((h, '') for h in _headers)

    record['local_hostname'] = full_prefix.split(',')[1]
    record['ipaddress'] = full_prefix.split(',')[2]

    computer_name = finditem(preferences, 'ComputerName')
    if computer_name is not None:
        record['computer_name'] = computer_name.encode('utf-8')
    record['hostname'] = finditem(preferences, 'HostName')
    record['model'] = finditem(preferences, 'Model')
    record['product_version'] = OSVersion
    record['product_build_version'] = finditem(systemversion, 'ProductBuildVersion')

    g = glob.glob(os.path.join(inputdir, 'private/var/folders/zz/zyxvpxvq6csfxvn_n00000sm00006d/C/*'))
    check_dbs = ['consolidated.db', 'cache_encryptedA.db', 'lockCache_encryptedA.db']
    serial_dbs = [loc for loc in g if any(db in loc for db in check_dbs)]
    serial_query = 'SELECT SerialNumber FROM TableInfo;'

    for db in serial_dbs:
        try:
            cursor = sqlite3.connect(db).cursor()
            record['serial_no'] = cursor.execute(serial_query).fetchone()[0]
            break

        except sqlite3.OperationalError:
            record['serial_no'] = 'SERIALERROR0'
            log.error("Could not retrieve system serial number.")

    record['volume_created'] = stats2(inputdir + "/", oMACB=True)['btime']
    record['amtc_runtime'] = str(startTime).replace(' ', 'T').replace('+00:00', 'Z')

    if 'Volumes' not in inputdir and forensic_mode is not True:

        tz, e = subprocess.Popen(["systemsetup", "-gettimezone"], stdout=subprocess.PIPE).communicate()
        record['system_tz'] = tz.rstrip().replace('Time Zone: ', '')

        _fdestatus, e = subprocess.Popen(["fdesetup", "status"], stdout=subprocess.PIPE).communicate()
        if 'On' in _fdestatus:
            record['fvde_status'] = "On"
        else:
            record['fvde_status'] = "Off"
    else:
        record['system_tz'] = "DEAD_DISK"
        record['fvde_status'] = "NA"

    # PROVIDE OUTPUT LINE, AND WRITE TO OUTFILE
    line = record.values()
    _output.write_entry(line)
Example #18
def module():
    if ("Volumes" not in inputdir and forensic_mode is False):

        headers = [
            'protocol', 'recv_q', 'send_q', 'src_ip', 'src_port', 'dst_ip',
            'dst_port', 'state'
        ]
        output = data_writer(_modName, headers)

        netstat_out, e = subprocess.Popen(
            ["netstat", "-f", "inet", "-n"],
            stdout=subprocess.PIPE).communicate()

        if e:
            pass
        else:
            netstat = netstat_out.decode().split('\n')
            for l in netstat:
                if not (l.startswith("Active") or l.startswith("Proto")
                        or len(l) == 0):
                    item = [x.lstrip(' ') for x in filter(None, l.split(' '))]
                    protocol = item[0]
                    recv_q = item[1]
                    send_q = item[2]

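                    # netstat on macOS prints addresses as a.b.c.d.port, so the last
                    # dot-separated token is the port and the rest is the IP.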
                    try:
                        src_ip = '.'.join(item[3].split('.')[0:4])
                    except Exception:
                        src_ip = "ERROR"
                    try:
                        src_port = item[3].split('.')[-1]
                    except Exception:
                        src_port = "ERROR"

                    try:
                        dst_ip = '.'.join(item[4].split('.')[0:4])
                    except Exception:
                        dst_ip = "ERROR"
                    try:
                        dst_port = item[4].split('.')[-1]
                    except Exception:
                        dst_port = "ERROR"

                    if len(item) == 6:
                        state = item[5]
                    else:
                        state = ""

                    line = [
                        protocol, recv_q, send_q, src_ip, src_port, dst_ip,
                        dst_port, state
                    ]
                    output.write_record(line)

        output.flush_record()
    else:
        log.error("Module did not run: input is not a live system!")
Example #19
def moduleChrome(chrome_cookies_location):
    log.debug("[START] Chrome cookies parsing")

    # Generate list of all chrome profiles under all chrome directories
    full_list_raw = [
        multiglob(c, ['Default', 'Profile *', 'Guest Profile'])
        for c in chrome_cookies_location
    ]
    full_list = list(itertools.chain.from_iterable(full_list_raw))

    # headers from db
    cookies_headers = [
        'user', 'profile', 'host_key', 'name', 'value', 'path', 'creation_utc',
        'expires_utc', 'last_access_utc', 'is_secure', 'is_httponly',
        'has_expires', 'is_persistent', 'priority', 'encrypted_value',
        'samesite', 'source_scheme'
    ]
    cookies_output = data_writer('browser_cookies_chrome', cookies_headers)

    for profile in full_list:
        userpath = profile.split('/')
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        chromeindex = userpath.index('Chrome') + 1
        prof = userpath[chromeindex]

        log.debug(
            "Starting parsing for Chrome cookies for profile {0} under user {1}."
            .format(prof, user))

        db_filepath = os.path.join(profile, 'Cookies')
        db_wrapper = SQLiteDB()
        db_wrapper.open(db_filepath, outputdir)

        # Check if required table exists
        if db_wrapper.table_exists('cookies') is False:
            log.debug("Chrome Cookies required table '{0}' not found.".format(
                'cookies'))
        else:
            get_chrome_version(db_filepath)
            pull_chrome_cookies(db_wrapper, db_filepath, user, profile,
                                cookies_output, cookies_headers)

    # flush output
    cookies_output.flush_record()

    # clean files
    # for file in glob.glob(os.path.join(outputdir, '*Cookies*')):
    #     try:
    #         os.remove(file)
    #     except OSError as e:
    #         log.debug('Unable to clean up temp file {0}: '.format(file) + str(e))
    #         continue

    log.debug("[END] Chrome cookies parsing")
Example #20
def module():
    if forensic_mode:
        log.error("Module did not run: input is not a live system!")
        return

    output = data_writer("unifiedlogs_live", None)

    predicates = [
        'process == "sudo" && eventMessage CONTAINS[c] "User=root" && (NOT eventMessage CONTAINS[c] "root : PWD=/ ; USER=root") && (NOT eventMessage CONTAINS[c] "    root : PWD=")',  # Captures command line activity run with elevated privileges
        # 'process == "logind"',  # Captures user login events < Large
        # 'process == "tccd"',    # Captures events that indicate permissions and access violations < Very Large
        'process == "sshd" ',  # Captures successful, failed and general ssh activity
        # '(process == "kextd" && sender == "IOKit")',  # Captures successful and failed attempts to add kernel extensions < Large
        '(process == "screensharingd" || process == "ScreensharingAgent")',  # Captures events that indicate successful or failed authentication via screen sharing
        # '(process == "loginwindow" && sender == "Security")',     # Captures keychain.db unlock events < Large
        '(process == "securityd" && eventMessage CONTAINS "Session" && subsystem == "com.apple.securityd")',  # Captures session creation and destruction events
    ]
    predicate = ' OR '.join(predicates)
    predicate = "'" + predicate + "'"

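    # 'log show' emits newline-delimited JSON (--style ndjson); the predicate built
    # above restricts output to the security-relevant processes listed.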
    cmd = 'log show --info --backtrace --debug --loss --signpost --style ndjson --force --timezone UTC --predicate'
    outcmd = '> {0}'.format(
        output.data_file_name.split(output.datatype)[0] + 'json')

    cmd = cmd + " " + predicate + " " + outcmd

    log.debug("Collecting Unified Logs via {0}".format(cmd))

    if sys.version_info[0] < 3:
        outbytes = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    shell=True).communicate()
        out = outbytes[0].decode('utf-8').split('\n')
    else:
        outbytes = subprocess.run(cmd,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  shell=True)
        out = outbytes.stdout.decode('utf-8').split('\n')
    if "Bad predicate" in out[0]:
        log.debug("command was: {0}".format(cmd))
        raise Exception("Failed to collect unified logs: {0}".format(out[0]))

    if output.datatype == "csv":
        log.debug('converting unified logs output to csv')
        from .common import json_to_csv
        json_to_csv.json_file_to_csv(
            output.data_file_name.split(output.datatype)[0] + 'json')
        os.remove(output.data_file_name.split(output.datatype)[0] + 'json')
    elif output.datatype == "all":
        log.debug('converting unified logs output to csv')
        from .common import json_to_csv
        json_to_csv.json_file_to_csv(
            output.data_file_name.split(output.datatype)[0] + 'json')
Example #21
def module():
    known_hosts_headers = [
        'src_name', 'user', 'bits', 'fingerprint', 'host', 'keytype'
    ]
    output = data_writer(_modName, known_hosts_headers)

    user_inputdir = glob.glob(os.path.join(inputdir, "Users/*"))
    user_inputdir.append(os.path.join(inputdir, "var/root"))

    record = OrderedDict((h, '') for h in known_hosts_headers)

    for user_home in user_inputdir:
        record['user'] = os.path.basename(user_home)

        # Gather known_hosts and authorized_users files for the user.
        kh_path = os.path.join(user_home, '.ssh/known_hosts')
        u_knownhosts = glob.glob(kh_path)

        ak_path = os.path.join(user_home, '.ssh/authorized_keys')
        u_authorizedkeys = glob.glob(ak_path)

        # Combine all files found into one list per user.
        u_ssh_all = u_knownhosts + u_authorizedkeys

        # Define the directory checked for use in debug messages.
        user_ssh_dir = os.path.join(user_home, '.ssh/')

        # Generate debug messages for files not found.
        if len(u_knownhosts) == 0:
            log.debug("File not found: {0}".format(kh_path))
        if len(u_authorizedkeys) == 0:
            log.debug("File not found: {0}".format(ak_path))

        # Iterate over files found and parse them using ssh-keygen.
        for file in u_ssh_all:
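            # 'ssh-keygen -l' prints one "<bits> <fingerprint> <host/comment> (<keytype>)" line per key.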
            p, e = subprocess.Popen(["ssh-keygen", "-l", "-f", file],
                                    stderr=subprocess.STDOUT,
                                    stdout=subprocess.PIPE).communicate()
            record['src_name'] = os.path.basename(file)

            if not e and not "is not a public key file" in p:
                p = p.split('\n')
                p = [x for x in p if len(x) > 0]
                for i in p:
                    data = i.split(' ')
                    record['bits'] = data[0]
                    record['fingerprint'] = data[1]
                    record['host'] = data[2]
                    record['keytype'] = data[3]

                    line = record.values()
                    output.write_entry(line)

            elif "is not a public key file" in p:
                log.debug("Could not parse {0}: {1}".format(file, p))
Example #22
def module():
    headers = [
        'user', 'timestamp', 'bundle_id', 'quarantine_agent', 'download_url',
        'sender_name', 'sender_address', 'typeno', 'origin_title',
        'origin_url', 'origin_alias'
    ]
    output = data_writer(_modName, headers)

    qevents_list = multiglob(inputdir, [
        'Users/*/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2',
        'private/var/*/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2'
    ])
    qry = 'SELECT * FROM LSQuarantineEvent'

    if len(qevents_list) == 0:
        log.debug("Files not found in: {0}".format(qevents_loc))

    for i in qevents_list:
        data = query_db(i, qry, outputdir)

        userpath = i.split('/')
        if 'Users' in userpath:
            userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        else:
            userindex = len(userpath) - 1 - userpath[::-1].index('var') + 1
        user = userpath[userindex]

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in headers)
            record['user'] = user
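            # LSQuarantineTimeStamp is stored as Mac absolute time (seconds since 2001-01-01 UTC).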
            record['timestamp'] = cocoa_time(item[1])
            record['bundle_id'] = item[2]
            record['quarantine_agent'] = item[3]
            record['download_url'] = item[4]
            record['sender_name'] = item[5]
            record['sender_address'] = item[6]
            record['typeno'] = str(item[7])
            record['origin_title'] = item[8]
            record['origin_url'] = item[9]
            record['origin_alias'] = item[10]

            try:
                line = [
                    x.encode('utf-8') if isinstance(x, unicode) else x
                    for x in record.values()
                ]
            except NameError as e:
                log.debug(e)
                log.debug("Running quarentines with python3 code")
                line = [
                    x if isinstance(x, str) else x for x in record.values()
                ]
            output.write_entry(line)
Example #23
def module():
    headers = ['mtime','atime','ctime','btime','src_name','src_file','prog_name','program','args','code_signatures', 'sha256', 'md5']
    output = data_writer(_modName, headers)

    parse_sandboxed_loginitems(headers, output)
    parse_loginitems(headers, output)
    parse_cron(headers, output)
    parse_LaunchAgentsDaemons(headers, output)
    parse_StartupItems(headers, output)
    parse_ScriptingAdditions(headers, output)
    parse_PeriodicItems_rcItems_emondItems(headers, output)
Example #24
def module():
    headers = [
        'src_file', 'user', 'src_name', 'item_index', 'order', 'name', 'url',
        'source_key'
    ]
    output = data_writer(_modName, headers)

    parse_sfls(headers, output)
    parse_sfl2s(headers, output)
    parse_securebookmarks(headers, output)
    parse_sidebarplists(headers, output)
    parse_finderplists(headers, output)
Example #25
def module():
    # KEEP THIS - ENABLES WRITING OUTPUT FILE.
    _output = data_writer(_modName, _headers)

    # -------------BEGIN MODULE-SPECIFIC LOGIC------------- #
    globalpreferences = read_bplist(
        os.path.join(inputdir, 'Library/Preferences/.GlobalPreferences.plist'))
    preferences = plistlib.readPlist(
        os.path.join(
            inputdir,
            'Library/Preferences/SystemConfiguration/preferences.plist'))
    systemversion = plistlib.readPlist(
        os.path.join(inputdir,
                     'System/Library/CoreServices/SystemVersion.plist'))

    # KEEP THE LINE BELOW TO GENERATE AN ORDEREDDICT BASED ON THE HEADERS
    record = OrderedDict((h, '') for h in _headers)

    record['local_hostname'] = finditem(preferences, 'LocalHostName')
    record['ipaddress'] = full_prefix.split(',')[2]

    computer_name = finditem(preferences, 'ComputerName')
    if computer_name is not None:
        record['computer_name'] = computer_name.encode('utf-8')
    record['hostname'] = finditem(preferences, 'HostName')
    record['model'] = finditem(preferences, 'Model')
    record['product_version'] = OSVersion
    record['product_build_version'] = finditem(systemversion,
                                               'ProductBuildVersion')

    g = glob.glob(
        os.path.join(
            inputdir,
            'private/var/folders/zz/zyxvpxvq6csfxvn_n00000sm00006d/C/*'))
    check_dbs = [
        'consolidated.db', 'cache_encryptedA.db', 'lockCache_encryptedA.db'
    ]
    serial_dbs = [
        loc for loc in g if any(loc.endswith(db) for db in check_dbs)
    ]
    serial_query = 'SELECT SerialNumber FROM TableInfo;'

    for db in serial_dbs:
        try:
            cursor = sqlite3.connect(db).cursor()
            record['serial_no'] = cursor.execute(serial_query).fetchone()[0]
            break

        except Exception as e:
            log.debug("Could not get serial number from {0}: {1}".format(
                db, [traceback.format_exc()]))
            record['serial_no'] = 'ERROR'
Example #26
def module():
    headers = [
        'src_name', 'timestamp', 'display_name', 'display_version',
        'package_identifiers', 'process_name'
    ]
    output = data_writer(_modName, headers)

    installhistory_loc = os.path.join(inputdir,
                                      'Library/Receipts/InstallHistory.plist')
    installhistory_list = glob.glob(installhistory_loc)

    if len(installhistory_list) == 0:
        log.debug("File not found: {0}".format(installhistory_loc))

    for file in installhistory_list:
        installhistoryfile = open(file, 'rb')
        try:
            installhistory = plistlib.load(installhistoryfile)
        except AttributeError as e:
            log.debug(e)
            log.debug("Running python 2 code for this.")
            installhistory = plistlib.readPlist(installhistoryfile)

        for i in range(len(installhistory)):
            record = OrderedDict((h, '') for h in headers)
            record['src_name'] = os.path.basename(file)
            if 'date' in installhistory[i]:
                record['timestamp'] = installhistory[i]['date'].strftime(
                    '%Y-%m-%dT%H:%M:%SZ')
            else:
                record['timestamp'] = ""
            if 'displayVersion' in installhistory[i]:
                record['display_version'] = installhistory[i]['displayVersion']
            else:
                record['display_version'] = ""
            if 'displayName' in installhistory[i]:
                record['display_name'] = installhistory[i]['displayName']
            else:
                record['display_name'] = ""
            if 'packageIdentifiers' in installhistory[i]:
                record['package_identifiers'] = installhistory[i][
                    'packageIdentifiers']
            else:
                record['package_identifiers'] = ""
            if 'processName' in installhistory[i]:
                record['process_name'] = installhistory[i]['processName']
            else:
                record['process_name'] = ""

            output.write_record(record)
    output.flush_record()
Example #27
def module():
    _headers = [
        'mtime', 'atime', 'ctime', 'btime', 'src_file', 'user', 'item_index',
        'cmd'
    ]
    output = data_writer(_modName, _headers)

    user_inputdir = glob.glob(os.path.join(inputdir, "Users/*"))
    user_inputdir += glob.glob(os.path.join(inputdir, "var/*"))

    for user_home in user_inputdir:
        # Get username from path.
        user = os.path.basename(user_home)

        # Get all _history files in root of user directory.
        bash_loc = os.path.join(user_home, '.*_history')
        u_bash = glob.glob(bash_loc)
        if len(u_bash) == 0:
            log.debug("Files not found in: {0}".format(bash_loc))

        # Get all bash_sessions files .bash_sessions directory.
        bash_sess_loc = os.path.join(user_home, '.bash_sessions/*')
        u_bash_sess = glob.glob(bash_sess_loc)
        if len(u_bash_sess) == 0:
            log.debug("Files not found in: {0}".format(bash_sess_loc))

        # Combine all files into a list and parse them iteratively.
        if len(u_bash) != 0 or len(u_bash_sess) != 0:
            log.debug(
                "Going to parse bash and other history under {0} user.".format(
                    user))
            u_bash_all = u_bash + u_bash_sess

            for sess in u_bash_all:
                out = stats2(sess)
                sess = open(sess, 'r').readlines()
                indexer = 0
                for line in sess:
                    record = OrderedDict((h, '') for h in _headers)

                    for i in _headers:
                        if i in out:
                            record[i] = out[i]
                            record['src_file'] = out['name']

                    record['user'] = user
                    record['cmd'] = line.rstrip()
                    indexer += 1
                    record['item_index'] = indexer
                    output.write_entry(record.values())
Example #28
def moduleFirefox(firefox_cookies_location):
    log.debug("[START] Firefox cookies parsing")

    cookies_headers = [
        'user', 'profile', 'host_key', 'name', 'value', 'path', 'creation_utc',
        'expires_utc', 'last_access_utc', 'is_secure', 'is_httponly',
        'browser_element', "same_site"
    ]
    cookies_output = data_writer('browser_firefox_cookies', cookies_headers)

    for c in firefox_cookies_location:
        userpath = c.split('/')
        userindex = len(userpath) - 1 - userpath[::-1].index('Users') + 1
        user = userpath[userindex]

        profileindex = userpath.index('Profiles') + 1
        profile = userpath[profileindex]

        log.debug(
            "Starting parsing for Firefox cookies for profile {0} under user {1}."
            .format(profile, user))

        # cookies_db = connect_to_db(os.path.join(c, 'cookies.sqlite'))
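        # Open via the SQLiteDB wrapper; temp copies of cookies.sqlite created under
        # outputdir are removed in the cleanup loop below.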
        db_filepath = os.path.join(c, 'cookies.sqlite')
        db_wrapper = SQLiteDB()
        db_wrapper.open(db_filepath, outputdir)

        if db_wrapper.table_exists('moz_cookies') is False:
            log.debug(
                "Firefox cookies required table 'moz_cookies' not found.")
        else:
            pull_firefox_cookies(db_wrapper, db_filepath, user, profile,
                                 cookies_output, cookies_headers)

        for file in glob.glob(os.path.join(outputdir, '*cookies.sqlite*')):
            try:
                os.remove(file)
            except OSError as e:
                log.debug('Unable to clean up temp file {0}: '.format(file) +
                          str(e))
                continue

    # flush output
    cookies_output.flush_record()

    log.debug("[END] Firefox cookies parsing")
Example #29
def module():
    headers = ['mtime', 'atime', 'ctime', 'btime', 'date_deleted', 'uniq_id', 'user', 'real_name', 'admin', 'lastloggedin_user']
    output = data_writer(_modName, headers)


    # Parse the com.apple.preferences.accounts.plist to identify deleted accounts.
    _deletedusers_plist = os.path.join(inputdir, 'Library/Preferences/com.apple.preferences.accounts.plist')
    log.debug("Getting deleted users metadata.")
    if not os.path.exists(_deletedusers_plist):
        log.debug("File not found: {0}".format(_deletedusers_plist))
        _deletedusers = []
    else:
        try:
            _deletedusers = read_bplist(_deletedusers_plist)[0]['deletedUsers']
        except Exception as e:
            log.debug("Could not parse: {0}".format(_deletedusers_plist))
            _deletedusers = []
Example #30
def module():
    headers = [
        'user', 'timestamp', 'bundle_id', 'quarantine_agent', 'download_url',
        'sender_name', 'sender_address', 'typeno', 'origin_title',
        'origin_url', 'origin_alias'
    ]
    output = data_writer(_modName, headers)

    qevents_loc = os.path.join(
        inputdir,
        'Users/*/Library/Preferences/com.apple.LaunchServices.QuarantineEventsV2'
    )
    qevents_list = glob.glob(qevents_loc)
    qry = 'SELECT * FROM LSQuarantineEvent'

    if len(qevents_list) == 0:
        log.debug("Files not found in: {0}".format(qevents_loc))

    for i in qevents_list:
        data = query_db(i, qry, outputdir)

        userpath = i.split('/')
        userindex = userpath.index('Users') + 1
        user = userpath[userindex]

        for item in data:
            item = list(item)
            record = OrderedDict((h, '') for h in headers)
            record['user'] = user
            record['timestamp'] = cocoa_time(item[1])
            record['bundle_id'] = item[2]
            record['quarantine_agent'] = item[3]
            record['download_url'] = item[4]
            record['sender_name'] = item[5]
            record['sender_address'] = item[6]
            record['typeno'] = str(item[7])
            record['origin_title'] = item[8]
            record['origin_url'] = item[9]
            record['origin_alias'] = item[10]

            line = [
                x.encode('utf-8') if isinstance(x, unicode) else x
                for x in record.values()
            ]
            output.write_entry(line)