def get_powerlogAggnotifications(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
        NOTIFICATIONBUNDLEID AS "BULLETIN BUNDLE ID",
        TIMEINTERVAL / 60 AS "TIME INTERVAL IN SECONDS",
        COUNT AS "COUNT",
        NOTIFICATIONTYPE AS "NOTIFICATION TYPE",
        ID AS "PLSPRINGBOARDAGENT_AGGREGATE_SBNOTIFICATIONS_AGGREGATE TABLE ID" 
    FROM
        PLSPRINGBOARDAGENT_AGGREGATE_SBNOTIFICATIONS_AGGREGATE 
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))

        report = ArtifactHtmlReport('Powerlog Aggregate Notifications')
        report.start_artifact_report(report_folder, 'Aggregate Notifications')
        report.add_script()
        data_headers = ('Timestamp', 'Notification Bundle ID',
                        'Time Interval in Seconds', 'Count',
                        'Notification Type', 'Aggregate Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Powerlog Agg Notifications'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Powerlog Aggregate Notifications data available')

    db.close()
    return
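All of these parser functions lean on the same small set of helpers (logfunc, tsv, timeline, ArtifactHtmlReport, open_sqlite_db_readonly). A minimal sketch of the import block they assume is shown below; the module paths follow the usual iLEAPP/ALEAPP layout and are an assumption, not something visible in the snippets themselves.

# Imports assumed by the parser functions in these examples.
# Module paths (scripts.artifact_report, scripts.ilapfuncs) are assumed from the
# iLEAPP/ALEAPP project layout and may differ in other forks.
import sqlite3
import textwrap

from scripts.artifact_report import ArtifactHtmlReport          # HTML artifact report writer
from scripts.ilapfuncs import (logfunc, tsv, timeline,           # logging, TSV and timeline export
                               open_sqlite_db_readonly)          # read-only SQLite connection helper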
Example 2
def get_userDict(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select 
    word,
    frequency,
    locale,
    appid,
    shortcut
    from words
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('User Dictionary')
        report.start_artifact_report(report_folder, 'User Dictionary')
        report.add_script()
        data_headers = ('Word', 'Frequency', 'Locale', 'AppID', 'Shortcut')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'user dictionary'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No User Dictionary data available')

    db.close()
    return
Example 3
def get_accs(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute("""
    select
    datetime(zdate+978307200,'unixepoch','utc' ),
    zaccounttypedescription,
    zusername,
    zaccountdescription,
    zaccount.zidentifier,
    zaccount.zowningbundleid
    from zaccount, zaccounttype 
    where zaccounttype.z_pk=zaccount.zaccounttype
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
        report = ArtifactHtmlReport('Account Data')
        report.start_artifact_report(report_folder, 'Account Data')
        report.add_script()
        data_headers = ('Timestamp', 'Account Desc.', 'Username',
                        'Description', 'Identifier', 'Bundle ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Account Data'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Account Data'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc("No Account Data available")
Example 4
def get_appLockerfishingnetpat(files_found, report_folder, seeker, wrap_text):

    standardKey = '526e7934384e693861506a59436e5549'
    standardIV = '526e7934384e693861506a59436e5549'
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)

        tree = ET.parse(file_found)
        root = tree.getroot()
        encryptedPattern = root.findall(
            './string[@name="85B064D26810275C89F1F2CC15E20B442E98874398F16F6717BBD5D34920E3F8"]'
        )[0].text
        cipher = AES.new(bytes.fromhex(standardKey), AES.MODE_CBC,
                         bytes.fromhex(standardIV))
        decryptedPattern = unpad(
            cipher.decrypt(bytes.fromhex(encryptedPattern)), AES.block_size)

        data_list.append((encryptedPattern, decryptedPattern))

    # Write the report once, after every file has been processed
    if data_list:
        report = ArtifactHtmlReport('Calculator Locker Pattern')
        report.start_artifact_report(report_folder,
                                     'Calculator Locker Pattern')
        report.add_script()
        data_headers = ('Encrypted Pattern', 'Decrypted Pattern')
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_no_escape=['Media'])
        report.end_artifact_report()

        tsvname = f'Calculator Locker Pattern data'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Calculator Locker Pattern data available')
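The pattern decryption above needs an XML parser plus an AES-CBC implementation whose call signatures match AES.new(key, AES.MODE_CBC, iv) and unpad(data, AES.block_size). A sketch of the imports it assumes follows; PyCryptodome is the assumed provider, since its API matches these calls.

# Imports assumed by get_appLockerfishingnetpat (PyCryptodome naming assumed).
import xml.etree.ElementTree as ET

from Crypto.Cipher import AES          # AES.new(key, AES.MODE_CBC, iv)
from Crypto.Util.Padding import unpad  # strips the PKCS#7 padding after decryption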
Example 5
def get_aggDictpasscode(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()

    cursor.execute("""
	select
	date(dayssince1970*86400, 'unixepoch'),
	key,
	value
	from
	scalars
	where key like 'com.apple.passcode.numpasscode%'
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = ''
        report = ArtifactHtmlReport('Aggregate Dictionary Passcode State')
        report.start_artifact_report(report_folder, 'Passcode State',
                                     description)
        report.add_script()
        data_headers = ('Day', 'Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Agg Dict Dictionary Passcode State'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Aggregate Dictionary Passcode State'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc("No Agg Dict Dictionary Data available")
Example 6
def get_bluetoothPaired(file_found, report_folder, seeker):
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()

    cursor.execute("""
    select 
    Uuid,
    Name,
    NameOrigin,
    Address,
    ResolvedAddress,
    LastSeenTime,
    LastConnectionTime
    from 
    PairedDevices
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []    
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4],row[6]))
    
        description = ''
        report = ArtifactHtmlReport('Bluetooth Paired LE')
        report.start_artifact_report(report_folder, 'Paired LE', description)
        report.add_script()
        data_headers = ('UUID','Name','Name Origin','Address','Resolved Address','Last Connection Time')     
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Bluetooth Paired LE'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available for Bluetooth Paired LE')
    
    db.close()
Example 7
def get_smembersEvents(files_found, report_folder, seeker, wrap_text):
    
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select 
    datetime(created_at /1000, "unixepoch"), 
    type, 
    value,
    in_snapshot
    FROM device_events
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Samsung Members - Events')
        report.start_artifact_report(report_folder, 'Samsung Members - Events')
        report.add_script()
        data_headers = ('Created At','Type','Value','Snapshot?' )
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = f'samsung members - events'
        tsv(report_folder, data_headers, data_list, tsvname)
        
        tlactivity = f'Samsung Members - Events'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Samsung Members - Events data available')
    
    db.close()
    return
Example 8
def get_installedappsLibrary(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        account,
        doc_id,
        case
        when purchase_time = 0 THEN ''
        when purchase_time > 0 THEN datetime(purchase_time / 1000, "unixepoch")
        END as pt
    FROM
    ownership  
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Installed Apps (Library)')
        report.start_artifact_report(report_folder, 'Installed Apps (Library)')
        report.add_script()
        data_headers = ('Account', 'Doc ID', 'Purchase Time')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'installed apps library'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Installed Apps (Library) data available')

    db.close()
    return
Example 9
def get_Agent_Accounts(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        data._id,
        data.account_name,
        data.account_type
    FROM data
    ORDER BY 
        data._id ASC
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Agent_Accounts')
        report.start_artifact_report(report_folder, 'Agent_Accounts')
        report.add_script()
        data_headers = ('ID', 'Account', 'Type')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Agent_Accounts'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Agent_Accounts data available')

    db.close()
    return
Example 10
def get_suggestions(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('suggestions.xml'):
            continue # Skip all other files
        
        data_list = []
        tree = ET.parse(file_found)
        root = tree.getroot()
        
        setup_time_names = (
            'com.android.settings.suggested.category.DEFERRED_SETUP_setup_time',
            'com.android.settings/com.android.settings.biometrics.fingerprint.FingerprintEnrollSuggestionActivity_setup_time',
            'com.google.android.setupwizard/com.google.android.setupwizard.deferred.DeferredSettingsSuggestionActivity_setup_time',
        )
        for elem in root:
            item = elem.attrib
            # Each of these suggestion entries stores its setup time as epoch milliseconds
            if item.get('name') in setup_time_names:
                timestamp = datetime.datetime.utcfromtimestamp(int(item['value']) / 1000).strftime('%Y-%m-%d %H:%M:%S')
                data_list.append((timestamp, item['name']))
        
        if data_list:
            report = ArtifactHtmlReport('Suggestions.xml')
            report.start_artifact_report(report_folder, 'Suggestions.xml')
            report.add_script()
            data_headers = ('Timestamp','Name')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = f'Suggestions XML data'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = f'Suggestions XML data'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Suggestions XML data available')
Example 11
def get_aggDictpasscode(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
	DATE(DAYSSINCE1970*86400, 'unixepoch') AS DAY,
	KEY AS "KEY",
	VALUE AS "VALUE"
	FROM
	SCALARS
	where key like 'com.apple.passcode.NumPasscode%'
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = ''
        report = ArtifactHtmlReport('Aggregate Dictionary Passcode State')
        report.start_artifact_report(report_folder, 'Passcode State',
                                     description)
        report.add_script()
        data_headers = ('Day', 'Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Agg Dict Dictionary Passcode State'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Aggregate Dictionary Passcode State'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc("No Agg Dict Dictionary Data available")
Example 12
def get_aggDictpasscodetype(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
	DATE(DAYSSINCE1970*86400, 'unixepoch') AS DAY,
	KEY AS "KEY",
	CASE 
		WHEN VALUE=-1 THEN '6-Digit'
		WHEN VALUE=0 THEN 'No Passcode'
		WHEN VALUE=1 THEN '4-Digit'
		WHEN VALUE=2 THEN 'Custom Alphanumeric'
		WHEN VALUE=3 THEN 'Custom Numeric'
		ELSE "N/A"
	END "VALUE"
	FROM
	SCALARS
	where key like 'com.apple.passcode.PasscodeType%'
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2]))

    description = ''
    report = ArtifactHtmlReport('Aggregate Dictionary Passcode Type')
    report.start_artifact_report(report_folder, 'Passcode Type', description)
    report.add_script()
    data_headers = ('Day', 'Key', 'Value')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Agg Dict Dictionary Passcode Type'
    tsv(report_folder, data_headers, data_list, tsvname)
Example 13
def get_geodApplications(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute("""
	SELECT count_type, app_id, createtime
	FROM mkcount
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[2], row[0], row[1]))

        description = ''
        report = ArtifactHtmlReport('Geolocation')
        report.start_artifact_report(report_folder, 'Applications',
                                     description)
        report.add_script()
        data_headers = ("Creation Time", "Count ID", "Application")
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = 'Geolocation Applications'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Geolocation Applications'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No data available for Geolocation Applications')

    db.close()
    return
Example 14
def get_aggDictpasscodetype(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()

    cursor.execute("""
	select
	date(dayssince1970*86400, 'unixepoch'),
	key,
	case 
	when value=-1 then '6-digit'
	when value=0 then 'no passcode'
	when value=1 then '4-digit'
	when value=2 then 'custom alphanumeric'
	when value=3 then 'custom numeric'
	else "n/a"
	end "value"
	from
	scalars
	where key like 'com.apple.passcode.passcodetype%'
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2]))

    description = ''
    report = ArtifactHtmlReport('Aggregate Dictionary Passcode Type')
    report.start_artifact_report(report_folder, 'Passcode Type', description)
    report.add_script()
    data_headers = ('Day', 'Key', 'Value')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Agg Dict Dictionary Passcode Type'
    tsv(report_folder, data_headers, data_list, tsvname)
Example 15
def get_discordManifest(files_found, report_folder, seeker):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        if os.path.isfile(file_found):
            with open(file_found) as f_in:
                for jsondata in f_in:
                    jsonfinal = json.loads(jsondata)

            # Only read the parsed manifest when the path was actually a file,
            # otherwise jsonfinal would be undefined here.
            for key, value in jsonfinal.items():
                data_list.append((key, value))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Discord Manifest')
        report.start_artifact_report(report_folder, 'Discord Manifest')
        report.add_script()
        data_headers = ('Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Discord Manifest'
        tsv(report_folder, data_headers, data_list, tsvname)
Example 16
def read_sms_messages(db, report_folder, file_found):
    cursor = db.cursor()
    cursor.execute(sms_query)
    all_rows = cursor.fetchall()
    entries = len(all_rows)
    if entries > 0:
        report = ArtifactHtmlReport('SMS messages')
        report.start_artifact_report(report_folder, 'SMS messages')
        report.add_script()
        data_headers = ('MSG ID', 'Thread ID', 'Address', 'Contact ID', 'Date',
                        'Date sent', 'Read', 'Type', 'Body', 'Service Center',
                        'Error code')
        data_list = []
        for row in all_rows:
            data_list.append((row['msg_id'], row['thread_id'], row['address'],
                              row['person'], row['date'], row['date_sent'],
                              row['read'], row['type'], row['body'],
                              row['service_center'], row['error_code']))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No SMS messages found!')
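read_sms_messages indexes result rows by column name (row['msg_id'], row['body'], ...), so it only works if the caller opened the connection with a name-based row factory and defined sms_query at module level. A minimal sketch of the setup it assumes; the query and database name below are placeholders, not the real values used by the module:

import sqlite3

# Placeholder only -- the real sms_query is defined in the module that owns read_sms_messages.
sms_query = 'SELECT * FROM sms'

db = sqlite3.connect('mmssms.db')     # illustrative database name
db.row_factory = sqlite3.Row          # enables the row['column_name'] access used above
read_sms_messages(db, '/tmp/report', 'mmssms.db')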
Example 17
def get_sbrowser(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
        url,
        title,
        visit_count,
        datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch"),
        hidden
    from urls  
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Browser History')
        report.start_artifact_report(report_folder, 'Browser History')
        report.add_script()
        data_headers = ('URL', 'Title', 'Visit Count', 'Last Visit Time', 'Hidden')
        data_list = []
        for row in all_rows:
            data_list.append(
                (textwrap.fill(row[0],
                               width=100), row[1], row[2], row[3], row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No browser history data available')

    db.close()
    return
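The expression last_visit_time / 1000000 + strftime('%s', '1601-01-01') converts Chrome/WebKit timestamps (microseconds since 1601-01-01 UTC) into Unix seconds before DATETIME renders them. The same conversion in plain Python, useful for spot-checking a value pulled from the urls table:

from datetime import datetime, timedelta, timezone

WEBKIT_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)

def webkit_to_utc(last_visit_time):
    # last_visit_time is microseconds since 1601-01-01 00:00:00 UTC
    return WEBKIT_EPOCH + timedelta(microseconds=last_visit_time)

print(webkit_to_utc(13247999000000000))  # roughly late October 2020 (illustrative value)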
Example 18
def get_smanagerCrash(files_found, report_folder, seeker, wrap_text):

    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(crash_time / 1000, "unixepoch"),
    package_name
    from crash_info
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Samsung Smart Manager - Crash')
        report.start_artifact_report(report_folder,
                                     'Samsung Smart Manager - Crash')
        report.add_script()
        data_headers = ('Timestamp', 'Package Name')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'samsung smart manager - crash'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Samsung Smart Manager - Crash'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Samsung Smart Manager - Crash data available')

    db.close()
    return
Example 19
def get_safariBookmarks(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
		title,
		url,
		hidden
	FROM
	bookmarks
			""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        description = ''
        report = ArtifactHtmlReport('Safari Browser Bookmarks')
        report.start_artifact_report(report_folder, 'Bookmarks', description)
        report.add_script()
        data_headers = ('Title', 'URL', 'Hidden')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Safari Browser Bookmarks'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Safari Bookmarks data available')

    db.close()
    return
Example 20
def process_ssecure(file_path, uid, report_folder):

    try:
        tree = ET.parse(file_path)
        root = tree.getroot()
    except ET.ParseError:  # Fix for android 11 invalid XML file (no root element present)
        with open(file_path) as f:
            xml = f.read()
            root = ET.fromstring(
                re.sub(r"(<\?xml[^>]+\?>)", r"\1<root>", xml) + "</root>")
    data_list = []
    for setting in root.iter('setting'):
        nme = setting.get('name')
        val = setting.get('value')
        if nme == 'bluetooth_name':
            data_list.append((nme, val))
            logdevinfo(f"Bluetooth name: {val}")
        elif nme == 'mock_location':
            data_list.append((nme, val))
        elif nme == 'android_id':
            data_list.append((nme, val))
        elif nme == 'bluetooth_address':
            data_list.append((nme, val))
            logdevinfo(f"Bluetooth address: {val}")

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Settings Secure')
        report.start_artifact_report(report_folder, f'Settings_Secure_{uid}')
        report.add_script()
        data_headers = ('Name', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_path)
        report.end_artifact_report()

        tsvname = f'settings secure'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Settings Secure data available')
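The ET.ParseError fallback in process_ssecure handles Android 11 settings files that hold several top-level <setting> elements with no enclosing root: it injects an artificial <root> right after the XML declaration and appends the matching close tag. A self-contained sketch of the same trick on a fabricated two-entry sample:

import re
import xml.etree.ElementTree as ET

# Fabricated sample resembling an Android 11 settings_secure.xml with no root element.
xml = ('<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n'
       '<setting id="1" name="bluetooth_name" value="Pixel" />\n'
       '<setting id="2" name="android_id" value="abc123" />\n')

root = ET.fromstring(re.sub(r"(<\?xml[^>]+\?>)", r"\1<root>", xml) + "</root>")
for setting in root.iter('setting'):
    print(setting.get('name'), setting.get('value'))   # bluetooth_name Pixel / android_id abc123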
Example 21
def process_accounts_ce_authtokens(folder, uid, report_folder):

    #Open the database read-only
    db = open_sqlite_db_readonly(folder)
    cursor = db.cursor()

    #Query to create report
    cursor.execute('''
    SELECT
        accounts._id,
        accounts.name,
        accounts.type,
        authtokens.type,
        authtokens.authtoken
    FROM accounts, authtokens
    WHERE
        accounts._id = authtokens.accounts_id
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Authtokens')
        report.start_artifact_report(report_folder, f'Authtokens_{uid}')
        report.add_script()
        data_headers = ('ID', 'Name', 'Account Type', 'Authtoken Type',
                        'Authtoken')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()

        tsvname = f'authtokens {uid}'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc(f'No Authtokens_{uid} data available')
    db.close()
Example 22
def get_bluetoothOther(file_found, report_folder, seeker):
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()

    cursor.execute(
    """
    SELECT
    Name,
    Address,
    LastSeenTime,
    Uuid
    FROM
    OtherDevices
    order by Name desc
    """)

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []    
    if usageentries > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[3]))
    
        description = ''
        report = ArtifactHtmlReport('Bluetooth Other LE')
        report.start_artifact_report(report_folder, 'Other LE', description)
        report.add_script()
        data_headers = ('Name','Address','UUID')     
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Bluetooth Other LE'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available for Bluetooth Other')
    
    db.close()
Example 23
def get_aggDict(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
		DATE(DISTRIBUTIONKEYS.DAYSSINCE1970*86400, 'unixepoch') AS "DAY",
		DISTRIBUTIONKEYS.KEY AS "KEY",
		DISTRIBUTIONVALUES.VALUE AS "VALUE",
		DISTRIBUTIONVALUES.SECONDSINDAYOFFSET AS "SECONDS IN DAY OFFSET",
		DISTRIBUTIONVALUES.DISTRIBUTIONID AS "DISTRIBUTIONVALUES TABLE ID"
	FROM
		DISTRIBUTIONKEYS 
		LEFT JOIN
			DISTRIBUTIONVALUES 
			ON DISTRIBUTIONKEYS.ROWID = DISTRIBUTIONVALUES.DISTRIBUTIONID
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2], row[3], row[4]))

    description = ''
    report = ArtifactHtmlReport('Aggregate Dictionary Distributed Keys')
    report.start_artifact_report(report_folder, 'Distributed Keys',
                                 description)
    report.add_script()
    data_headers = ('Day', 'Key', 'Value', 'Seconds in Day Offset',
                    'Distribution Values Table ID')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Agg Dict Dist Keys'
    tsv(report_folder, data_headers, data_list, tsvname)
Example 24
def get_accs(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute(
        """
		SELECT
		ZACCOUNTTYPEDESCRIPTION,
		ZUSERNAME,
		DATETIME(ZDATE+978307200,'UNIXEPOCH','UTC' ) AS 'ZDATE TIMESTAMP',
		ZACCOUNTDESCRIPTION,
		ZACCOUNT.ZIDENTIFIER,
		ZACCOUNT.ZOWNINGBUNDLEID
		FROM ZACCOUNT
		JOIN ZACCOUNTTYPE ON ZACCOUNTTYPE.Z_PK=ZACCOUNT.ZACCOUNTTYPE
		ORDER BY ZACCOUNTTYPEDESCRIPTION
		"""
    )

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5]))                
        report = ArtifactHtmlReport('Account Data')
        report.start_artifact_report(report_folder, 'Account Data')
        report.add_script()
        data_headers = ('Account Desc.','Username','Timestamp','Description','Identifier','Bundle ID' )     
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Account Data'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc("No Account Data available")
Example 25
def get_powerlogVolume(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    DATETIME(TIMESTAMP, 'UNIXEPOCH') AS TIMESTAMP,
    VOLUME,
    CASE MUTED 
        WHEN "0" THEN "NO" 
        WHEN "1" THEN "YES" 
    END "MUTED", 
    ID AS "PLAUDIOAGENT_EVENTFORWARD_OUTPUT TABLE ID" 
    FROM
    PLAUDIOAGENT_EVENTFORWARD_OUTPUT
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:    
            data_list.append((row[0],row[1],row[2],row[3]))

        report = ArtifactHtmlReport('Powerlog Volume')
        report.start_artifact_report(report_folder, 'Volume')
        report.add_script()
        data_headers = ('Timestamp','Volume','Muted','Event Forward Output Table ID')   
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Powerlog Volume'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Powerlog Volume data available')

    db.close()
    return   
Example 26
def get_chromeSearchTerms(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        url_id,
        term,
        id,
        url,
        datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    FROM keyword_search_terms, urls
    WHERE url_id = id
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Keyword Search Terms')
        report.start_artifact_report(report_folder, 'Search Terms')
        report.add_script()
        data_headers = ('Term', 'URL', 'Last Visit Time')
        data_list = []
        for row in all_rows:
            data_list.append((row[1], (textwrap.fill(row[3],
                                                     width=100)), row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No keyword search terms data available')

    db.close()
    return
Example 27
def get_dhcphp(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    data_list = []
    reportval = ''
    with open(file_found, "r") as filefrom:
        for line in filefrom:
            cline = line.strip()
            if cline == "{":
                reportval = reportval + (
                    "<table><tr><td>Key</td><td>Values</td></tr>")
            elif cline == "}":
                reportval = reportval + ("</table>")
                data_list.append((reportval, ))
                reportval = ''
            elif cline == '':
                pass  # blank lines between entries carry no data
            else:
                # partition() tolerates lines without an '=' separator
                key, _, value = cline.partition("=")
                reportval = reportval + (f"<tr><td>{key}</td>")
                reportval = reportval + (f"<td>{value}</td></tr>")

    if len(data_list) > 0:
        report = ArtifactHtmlReport('DHCP Hotspot Clients')
        report.start_artifact_report(report_folder, 'Hotspot Clients')
        report.add_script()
        data_headers = ('Hotspot Clients', )
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = 'DHCP Hotspot Clients'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No DHCP Hotspot Clients data available')
    return
Example 28
def get_mobileBackup(files_found, report_folder, seeker):
    data_list = []
    file_found = str(files_found[0])

    with open(file_found, 'rb') as fp:
        pl = plistlib.load(fp)

        if 'BackupStateInfo' in pl.keys():
            for key, val in pl['BackupStateInfo'].items():
                if key == 'isCloud':
                    data_list.append((key, val))
                if key == 'date':
                    data_list.append((key, val))

        if 'RestoreInfo' in pl.keys():
            for key, val in pl['RestoreInfo'].items():
                if key == 'BackupBuildVersion':
                    data_list.append((key, val))
                if key == 'DeviceBuildVersion':
                    data_list.append((key, val))
                if key == 'WasCloudRestore':
                    data_list.append((key, val))
                if key == 'RestoreDate':
                    data_list.append((key, val))

    report = ArtifactHtmlReport('Mobile Backup')
    report.start_artifact_report(report_folder, 'Mobile Backup')
    report.add_script()
    data_headers = ('Key', 'Value')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Mobile Backup'
    tsv(report_folder, data_headers, data_list, tsvname)
Example 29
def get_googlePlaySearches(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(date / 1000, "unixepoch"),
    display1,
    query
    from suggestions
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Google Play Searches')
        report.start_artifact_report(report_folder, 'Google Play Searches')
        report.add_script()
        data_headers = ('Timestamp', 'Display', 'Query')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'google play searches'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Google Play Searches data available')

    db.close()
    return
Example 30
def get_dataUsageProcessB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    DATETIME(ZPROCESS.ZTIMESTAMP+ 978307200, 'UNIXEPOCH') AS "TIMESTAMP",
    DATETIME(ZPROCESS.ZFIRSTTIMESTAMP + 978307200, 'UNIXEPOCH') AS "PROCESS FIRST TIMESTAMP",
    ZPROCESS.ZPROCNAME AS "PROCESS NAME",
    ZPROCESS.ZBUNDLENAME AS "BUNDLE ID",
    ZPROCESS.Z_PK AS "ZPROCESS TABLE ID" 
    FROM ZPROCESS
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        report = ArtifactHtmlReport('Data Usage')
        report.start_artifact_report(report_folder, 'Data Usage Process')
        report.add_script()
        data_headers = ('Timestamp', 'Process First Timestamp', 'Process Name',
                        'Bundle ID', 'Table ID')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Data Usage Process'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Data Usage available')

    db.close()
    return