Example 1
def get_packageGplinks(files_found, report_folder, seeker, wrap_text):
    data_list = []

    for file_found in files_found:
        if 'sbin' not in file_found:
            file_found = str(file_found)
            source_file = file_found.replace(seeker.directory, '')

            with open(file_found) as data:
                values = data.readlines()

            for x in values:
                bundleid = x.split(' ', 1)
                url = f'<a href="https://play.google.com/store/apps/details?id={bundleid[0]}" target="_blank"><font color="blue">https://play.google.com/store/apps/details?id={bundleid[0]}</font></a>'
                data_list.append((bundleid[0], url))

    usageentries = len(data_list)
    if usageentries > 0:
        report = ArtifactHtmlReport('Google Play Links for Apps')
        report.start_artifact_report(report_folder,
                                     'Google Play Links for Apps')
        report.add_script()
        data_headers = ('Bundle ID', 'Possible Google Play Store Link')
        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         file_found,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = f'Google Play Links for Apps'
        tsv(report_folder, data_headers, data_list, tsvname, source_file)

    else:
        logfunc('No Google Play Links for Apps data available')
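All of these snippets follow the ALEAPP/iLEAPP artifact-module pattern and lean on shared helpers (ArtifactHtmlReport, tsv, timeline, kmlgen, logfunc, logdevinfo, open_sqlite_db_readonly) that the excerpts themselves never import. A minimal sketch of the imports such a module typically needs is shown below; the scripts.* paths mirror the ALEAPP layout and are assumptions, not guaranteed to match every project or version.

# Assumed imports for an ALEAPP/iLEAPP-style artifact module (sketch only;
# the scripts.* paths follow the ALEAPP repository layout).
import os
import datetime
import sqlite3
import textwrap
import xml.etree.ElementTree as ET

from scripts.artifact_report import ArtifactHtmlReport   # HTML report writer
from scripts.ilapfuncs import (logfunc, logdevinfo,       # logging helpers
                               tsv, timeline, kmlgen,     # TSV / timeline / KML exports
                               is_platform_windows,
                               open_sqlite_db_readonly)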
Example 2
def get_appLockerfishingnetdb(files_found, report_folder, seeker, wrap_text):

    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        message = 'The located database is encrypted. It contains information regarding the source directory of the encrypted files, timestamp metadata, and original filenames.'
        decryptioninst = 'To decrypt follow the instructions at the following URL: https://theincidentalchewtoy.wordpress.com/2021/12/07/decrypting-the-calculator-apps/'
        keytodecrypt = 'Rny48Ni8aPjYCnUI'

        data_list.append((message, decryptioninst, keytodecrypt))

        if data_list:
            report = ArtifactHtmlReport('Calculator Locker Database')
            report.start_artifact_report(report_folder,
                                         'Calculator Locker Database')
            report.add_script()
            data_headers = ('Message', 'Decryption Instructions',
                            'Key To Decrypt')
            report.write_artifact_data_table(data_headers,
                                             data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Calculator Locker Database data'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No Calculator Locker Database data available')
Example 3
def get_queryPredictions(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(creationTimestamp, "UNIXEPOCH") as START, 
    content,
    isSent,
    conversationId,
    id,
    uuid
    from messages 
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        data_list = []
        for row in all_rows:    
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5]))

        report = ArtifactHtmlReport('Query Predictions')
        report.start_artifact_report(report_folder, 'Query Predictions')
        report.add_script()
        data_headers = ('Timestamp','Content','Is Sent?','Conversation ID','ID','UUID')   
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Query Predictions'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available in table')

    db.close()
    return   
Example 4
def get_sbrowserTopSites(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select
    url,
    url_rank,
    title,
    redirects
    FROM
    top_sites ORDER by url_rank asc
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Browser Top Sites')
        report.start_artifact_report(report_folder, 'Browser Top Sites')
        report.add_script()
        data_headers = ('URL', 'Rank', 'Title', 'Redirects')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Browser Top Sites data available')

    db.close()
    return
Example 5
def get_appleMapsApplication(files_found, report_folder, seeker):
    versionnum = 0
    file_found = str(files_found[0])
    
    with open(file_found, 'rb') as f:
        deserialized_plist = plistlib.load(f)
        
        types = {'1': {'type': 'double', 'name': 'Latitude'},
                '2': {'type': 'double', 'name': 'Longitude'}, 
                '3': {'type': 'double', 'name': ''}, 
                '4': {'type': 'fixed64', 'name': ''}, 
                '5': {'type': 'double', 'name': ''}
                }    
        
        internal_deserialized_plist, di = blackboxprotobuf.decode_message((deserialized_plist['__internal__LastActivityCamera']),types)
        latitude = (internal_deserialized_plist['Latitude'])
        longitude = (internal_deserialized_plist['Longitude'])
        
        data_list = []
        data_list.append((latitude, longitude))
        report = ArtifactHtmlReport('Apple Maps App')
        report.start_artifact_report(report_folder, 'Apple Maps App')
        report.add_script()
        data_headers = ('Latitude','Longitude' )     
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Apple Maps Application'
        tsv(report_folder, data_headers, data_list, tsvname)
    

            
Example 6
def get_celWireless(files_found, report_folder, seeker):
    data_list = []
    for filepath in files_found:
        basename = os.path.basename(filepath)
        if (basename == "com.apple.commcenter.device_specific_nobackup.plist"
                or basename == "com.apple.commcenter.plist"):
            with open(filepath, "rb") as p:
                plist = plistlib.load(p)
            for key, val in plist.items():
                data_list.append((key, val, filepath))
                if key == "ReportedPhoneNumber":
                    logdevinfo(f"Reported Phone Number: {val}")

                if key == "CDMANetworkPhoneNumberICCID":
                    logdevinfo(f"CDMA Network Phone Number ICCID: {val}")

                if key == "imei":
                    logdevinfo(f"IMEI: {val}")

                if key == "LastKnownICCID":
                    logdevinfo(f"Last Known ICCID: {val}")

                if key == "meid":
                    logdevinfo(f"MEID: {val}")

    location = 'see source field'
    report = ArtifactHtmlReport('Cellular Wireless')
    report.start_artifact_report(report_folder, 'Cellular Wireless')
    report.add_script()
    data_headers = ('Key', 'Values', 'Source')
    report.write_artifact_data_table(data_headers, data_list, location)
    report.end_artifact_report()

    tsvname = 'Cellular Wireless'
    tsv(report_folder, data_headers, data_list, tsvname)
Example 7
def get_chromeCookies(files_found, report_folder, seeker):
    
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    host_key,
    name,
    value,
    CASE
        last_access_utc 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(last_access_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "last_access_utc", 
    CASE
        creation_utc 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(creation_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "creation_utc", 
    CASE
        expires_utc 
        WHEN
            "0" 
        THEN
            "0" 
        ELSE
            datetime(expires_utc / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch")
    END AS "expires_utc", 
    path
    FROM
    cookies
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Chrome Cookies')
        report.start_artifact_report(report_folder, 'Cookies')
        report.add_script()
        data_headers = ('Host', 'Name', 'Value', 'Last Access Date', 'Created Date', 'Expiration Date', 'Path')
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],(textwrap.fill(row[2], width=50)),row[3],row[4],row[5],row[6]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Chrome cookies data available')
    
    db.close()
    return
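The CASE expressions above (and the history query in Example 11) convert Chrome's WebKit timestamps, which are stored as microseconds since 1601-01-01 UTC, into readable dates. A small standalone sketch of the same conversion in Python, handy for spot-checking individual values pulled from the cookies table:

import datetime

def webkit_to_datetime(webkit_us):
    # Chrome/WebKit timestamps count microseconds from 1601-01-01 UTC;
    # 0 means "not set", mirroring the WHEN "0" branch in the SQL above.
    if webkit_us == 0:
        return None
    epoch_1601 = datetime.datetime(1601, 1, 1, tzinfo=datetime.timezone.utc)
    return epoch_1601 + datetime.timedelta(microseconds=webkit_us)

# e.g. a raw creation_utc value read straight from the cookies table
print(webkit_to_datetime(13226510941000000))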
Example 8
def get_atrackerdetect(files_found, report_folder, seeker, wrap_text):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        tree = ET.parse(file_found)
        root = tree.getroot()

        for elem in root.iter():
            attribute = (elem.attrib)
            if attribute:
                data = attribute.get('name')
                if not data:
                    continue
                if data.startswith('device'):
                    mac = data.split('_', 2)[1]
                    desc = data.split('_', 2)[2]
                    data_list.append((desc, mac, elem.text))
                else:
                    data_list.append((data, attribute.get('value'), ''))

        if data_list:
            report = ArtifactHtmlReport('Apple Tracker Detect Prefs')
            report.start_artifact_report(report_folder,
                                         'Apple Tracker Detect Prefs')
            report.add_script()
            data_headers = ('Key', 'Value', 'Milliseconds from Last Boot Time')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Apple Tracker Detect Prefs'
            tsv(report_folder, data_headers, data_list, tsvname)

        else:
            logfunc('No Apple Tracker Detect Prefs data available')
Example 9
def get_setupWizardinfo(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('setup_wizard_info.xml'):
            continue  # Skip all other files

        data_list = []
        tree = ET.parse(file_found)
        root = tree.getroot()

        for elem in root:
            item = elem.attrib
            if item['name'] == 'suw_finished_time_ms':
                timestamp = (datetime.datetime.utcfromtimestamp(
                    int(item['value']) / 1000).strftime('%Y-%m-%d %H:%M:%S'))
                data_list.append((timestamp, item['name']))

        if data_list:
            report = ArtifactHtmlReport('Setup_Wizard_Info.xml')
            report.start_artifact_report(report_folder,
                                         'Setup_Wizard_Info.xml')
            report.add_script()
            data_headers = ('Timestamp', 'Name')
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Setup_Wizard_Info XML data'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Setup_Wizard_Info XML data'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Setup_Wizard_Info XML data available')
Example 10
def get_tileAppDb(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('tile-TileNetworkDB.sqlite'):
            break

    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(ZTIMESTAMP,'unixepoch','31 years'),
    ZNAME,
    datetime(ZACTIVATION_TIMESTAMP,'unixepoch','31 years'),
    datetime(ZREGISTRATION_TIMESTAMP,'unixepoch','31 years'),
    ZALTITUDE, 
    ZLATITUDE, 
    ZLONGITUDE,
    ZID,
    ZNODE_TYPE, 
    ZSTATUS,
    ZIS_LOST,
    datetime(ZLAST_LOST_TILE_COMMUNITY_CONNECTION,'unixepoch','31 years')
    FROM ZTILENTITY_NODE INNER JOIN ZTILENTITY_TILESTATE ON ZTILENTITY_NODE.ZTILE_STATE = ZTILENTITY_TILESTATE.Z_PK
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11]))

        description = ''
        report = ArtifactHtmlReport(
            'Tile App - Tile Information & Geolocation')
        report.start_artifact_report(report_folder,
                                     'Tile App DB Info & Geolocation',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Tile Name', 'Activation Timestamp',
                        'Registration Timestamp', 'Altitude', 'Latitude',
                        'Longitude', 'Tile ID', 'Tile Type', 'Status',
                        'Is Lost?', 'Last Community Connection')
        report.write_artifact_data_table(data_headers, data_list,
                                         file_found)
        report.end_artifact_report()

        tsvname = 'Tile App DB Info Geolocation'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Tile App DB Info Geolocation'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Tile App DB data available')

    db.close()
    return
Example 11
def get_chrome(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not os.path.basename(
                file_found) == 'History':  # skip -journal and other files
            continue
        elif file_found.find('.magisk') >= 0 and file_found.find(
                'mirror') >= 0:
            continue  # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data??
        browser_name = 'Chrome'
        if file_found.find('app_sbrowser') >= 0:
            browser_name = 'Browser'

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
            datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), "unixepoch"),
            url,
            title,
            visit_count,
            hidden
        from urls  
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport(f'{browser_name} History')
            #check for existing and get next name for report file, so report from another file does not get overwritten
            report_path = os.path.join(report_folder,
                                       f'{browser_name} History.temphtml')
            report_path = get_next_unused_name(
                report_path)[:-9]  # remove .temphtml
            report.start_artifact_report(report_folder,
                                         os.path.basename(report_path))
            report.add_script()
            data_headers = ('Last Visit Time', 'URL', 'Title', 'Visit Count',
                            'Hidden')
            data_list = []
            for row in all_rows:
                if wrap_text:
                    data_list.append((textwrap.fill(row[0], width=100), row[1],
                                      row[2], row[3], row[4]))
                else:
                    data_list.append((row[0], row[1], row[2], row[3], row[4]))
            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'{browser_name} History'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'{browser_name} History'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No {browser_name} history data available')

        db.close()
Example 12
def get_dhcpl(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    data_list = []
    with open(file_found, "rb") as fp:
        pl = plistlib.load(fp)
        for key, val in pl.items():
            if key == "IPAddress":
                data_list.append((key, val))
            if key == "LeaseLength":
                data_list.append((key, val))
            if key == "LeaseStartDate":
                data_list.append((key, val))
            if key == "RouterHardwareAddress":
                data_list.append((key, val))
            if key == "RouterIPAddress":
                data_list.append((key, val))
            if key == "SSID":
                data_list.append((key, val))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('DHCP Received List')
        report.start_artifact_report(report_folder, 'Received List')
        report.add_script()
        data_headers = ('Key', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'DHCP Received List'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available')
    return
Example 13
def get_recentApphistory(files_found, report_folder, seeker):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        with open(file_found, 'rb') as f:
            plist = biplist.readPlist(f)
            RecentAppHistory = plist.get('CARRecentAppHistory')
            
        if RecentAppHistory is not None:
            if len(RecentAppHistory) > 0:
                for bundleid, timestamp in RecentAppHistory.items():
                    timestamp = (datetime.datetime.fromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S'))
                    data_list.append((timestamp, bundleid))
        
    if len(data_list) > 0:
        description = 'CarPlay recent app history.'
        report = ArtifactHtmlReport('CarPlay Recent App History')
        report.start_artifact_report(report_folder, 'CarPlay Recent App History', description)
        report.add_script()
        data_headers = ('Timestamp','Bundle ID')     
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'CarPlay Recent App History'
        tsv(report_folder, data_headers, data_list, tsvname)
        
        tlactivity = 'CarPlay Recent App History'
        timeline(report_folder, tlactivity, data_list, data_headers)
        
    else:
        logfunc('No data on CarPlay Recent App History')
Example 14
def process_ssecure(file_path, uid, report_folder):

    try:
        tree = ET.parse(file_path)
        root = tree.getroot()
    except ET.ParseError:  # Fix for android 11 invalid XML file (no root element present)
        with open(file_path) as f:
            xml = f.read()
            root = ET.fromstring(
                re.sub(r"(<\?xml[^>]+\?>)", r"\1<root>", xml) + "</root>")
    data_list = []
    for setting in root.iter('setting'):
        nme = setting.get('name')
        val = setting.get('value')
        if nme == 'bluetooth_name':
            data_list.append((nme, val))
        elif nme == 'mock_location':
            data_list.append((nme, val))
        elif nme == 'android_id':
            data_list.append((nme, val))
        elif nme == 'bluetooth_address':
            data_list.append((nme, val))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Settings Secure')
        report.start_artifact_report(report_folder, f'Settings_Secure_{uid}')
        report.add_script()
        data_headers = ('Name', 'Value')
        report.write_artifact_data_table(data_headers, data_list, file_path)
        report.end_artifact_report()

        tsvname = f'settings secure'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Settings Secure data available')
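The except branch above works around Android 11 settings files that hold several top-level <setting> elements with no enclosing root, which ElementTree refuses to parse. A self-contained sketch of the same wrapping trick on a made-up fragment (the attribute values are illustrative only):

import re
import xml.etree.ElementTree as ET

# Hypothetical Android 11-style fragment: multiple top-level elements, no root.
xml = ('<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n'
       '<setting id="1" name="android_id" value="abc123" />\n'
       '<setting id="2" name="bluetooth_name" value="Pixel" />')

# Inject an artificial <root> right after the XML declaration and close it
# at the end, exactly as the except branch does, so the fragment parses.
root = ET.fromstring(re.sub(r"(<\?xml[^>]+\?>)", r"\1<root>", xml) + "</root>")

for setting in root.iter('setting'):
    print(setting.get('name'), setting.get('value'))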
Example 15
def get_filesAppsm(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)
        
        if file_found.endswith('smartfolders.db'):
            break
            
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT * 
    FROM
    FILENAMES
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:

        for row in all_rows:
            
            output_file = open(os.path.join(report_folder, row[2]+'.bplist'), "wb") 
            output_file.write(row[1])
            output_file.close()
            
            with open(os.path.join(report_folder, row[2]+'.bplist'), "rb") as f:
                deserialized_plist = nd.deserialize_plist(f)
            # Reset per-row values so a missing key cannot reuse the previous row's data
            creationdate = contentmodificationdate = flags = userinfo = childitemcount = ''
            for x, y in deserialized_plist.items():
                if x == '_creationDate':
                    creationdate = y
                if x == '_contentModificationDate':
                    contentmodificationdate = y
                if x == '_flags':
                    flags = y
                if x == '_userInfo':
                    userinfo = y
                if x == '_childItemCount':
                    childitemcount = y
            lasthitdate = datetime.datetime.fromtimestamp(row[3])
            
            data_list.append((lasthitdate, row[0], row[2], row[4], creationdate, contentmodificationdate, userinfo, childitemcount, flags))

        description = 'Files App - Files stored in the "On my iPad" area.'
        report = ArtifactHtmlReport('Files App - Filenames')
        report.start_artifact_report(report_folder, 'Files App - Filenames', description)
        report.add_script()
        data_headers = ('Last Hit Date','Folder ID','Filename','Frequency at Last Hit Date','Creation Date','Modification Date','User Info','Child Item Count','Flags' )
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Files App - Filenames'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Files App - Filenames'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Files App - Filenames data available')

    db.close()
Example 16
def get_runtimePerms(files_found, report_folder, seeker, wrap_text):

    run = 0
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)

        data_list = []
        run = run + 1
        err = 0

        parts = file_found.split(slash)
        if 'mirror' in parts:
            user = 'mirror'
        elif 'system' in parts:
            user = parts[-2]
        elif 'misc_de' in parts:
            user = parts[-4]

        if user == 'mirror':
            continue
        else:
            try:
                ET.parse(file_found)
            except ET.ParseError:
                logfunc('Parse error - Non XML file.')
                err = 1

            if err == 0:
                tree = ET.parse(file_found)
                root = tree.getroot()

                for elem in root:
                    #print(elem.tag)
                    usagetype = elem.tag
                    name = elem.attrib['name']
                    #print("Usage type: "+usagetype)
                    #print('name')
                    for subelem in elem:
                        permission = subelem.attrib['name']
                        granted = subelem.attrib['granted']
                        flags = subelem.attrib['flags']

                        data_list.append(
                            (usagetype, name, permission, granted, flags))

                if len(data_list) > 0:
                    report = ArtifactHtmlReport('Runtime Permissions')
                    report.start_artifact_report(
                        report_folder, f'Runtime Permissions_{user}')
                    report.add_script()
                    data_headers = ('Type', 'Name', 'Permission', 'Granted?',
                                    'Flag')
                    report.write_artifact_data_table(data_headers, data_list,
                                                     file_found)
                    report.end_artifact_report()

                    tsvname = f'Runtime Permissions_{user}'
                    tsv(report_folder, data_headers, data_list, tsvname)
Example 17
def get_smanagerCrash(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    package_name,
    datetime(crash_time / 1000, "unixepoch")
    from crash_info
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Samsung Smart Manager - Crash')
        report.start_artifact_report(report_folder,
                                     'Samsung Smart Manager - Crash')
        report.add_script()
        data_headers = ('Package Name', 'Timestamp')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'samsung smart manager - crash'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Samsung Smart Manager - Crash data available')

    db.close()
    return
Example 18
def process_accounts_de(folder, uid, report_folder):

    #Query to create report
    db = sqlite3.connect(folder)
    cursor = db.cursor()

    #Query to create report
    cursor.execute('''
    SELECT
        name,
        type,
        datetime(last_password_entry_time_millis_epoch / 1000, 'unixepoch') as 'last pass entry'
        FROM
    accounts
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Accounts_de')
        report.start_artifact_report(report_folder, f'accounts_de_{uid}')
        report.add_script()
        data_headers = ('Name', 'Type', 'Last password entry')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()
    else:
        logfunc(f'No accounts_de_{uid} data available')
    db.close()
Example 19
def process_accounts_ce_authtokens(folder, uid, report_folder):

    #Query to create report
    db = sqlite3.connect(folder)
    cursor = db.cursor()

    #Query to create report
    cursor.execute('''
    SELECT
        accounts._id,
        accounts.name,
        accounts.type,
        authtokens.type,
        authtokens.authtoken
    FROM accounts, authtokens
    WHERE
        accounts._id = authtokens.accounts_id
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Authtokens')
        report.start_artifact_report(report_folder, f'Authtokens_{uid}')
        report.add_script()
        data_headers = ('ID', 'Name', 'Account Type', 'Authtoken Type',
                        'Authtoken')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()
    else:
        logfunc(f'No Authtokens_{uid} data available')
    db.close()
Example 20
def process_ssecure(folder, uid, report_folder):

    tree = ET.parse(folder)
    root = tree.getroot()
    data_list = []
    for setting in root.findall('setting'):
        nme = setting.get('name')
        val = setting.get('value')
        if nme == 'bluetooth_name':
            data_list.append((nme, val))
        elif nme == 'mock_location':
            data_list.append((nme, val))
        elif nme == 'android_id':
            data_list.append((nme, val))
        elif nme == 'bluetooth_address':
            data_list.append((nme, val))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Settings Secure')
        report.start_artifact_report(report_folder, f'Settings_Secure_{uid}')
        report.add_script()
        data_headers = ('Name', 'Value')
        report.write_artifact_data_table(data_headers, data_list, folder)
        report.end_artifact_report()

        tsvname = f'settings secure'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Settings Secure data available')
Example 21
def get_aggDictScalars(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()

    cursor.execute("""
	SELECT
	   DATE(DAYSSINCE1970*86400, 'unixepoch') AS DAY,
	   KEY AS "KEY",
	   VALUE AS "VALUE"
	FROM
	   SCALARS
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    for row in all_rows:
        data_list.append((row[0], row[1], row[2]))

    description = ''
    report = ArtifactHtmlReport('Aggregate Dictionary Scalars')
    report.start_artifact_report(report_folder, 'Scalars', description)
    report.add_script()
    data_headers = ('Day', 'Key', 'Value')
    report.write_artifact_data_table(data_headers, data_list, file_found)
    report.end_artifact_report()

    tsvname = 'Agg Dict Scalars'
    tsv(report_folder, data_headers, data_list, tsvname)
Example 22
def get_cacheRoutesGmap(files_found, report_folder, seeker):
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)
        filename = os.path.basename(file_found)
        noext = os.path.splitext(filename)[0]
        noext = int(noext)
        datetime_time = datetime.datetime.fromtimestamp(noext/1000)
        datetime_time = str(datetime_time)
        with open(file_found, 'rb') as f:
            deserialized = plistlib.load(f)
            length = len(deserialized['$objects'])
            for x in range(length):
                try: 
                    lat = deserialized['$objects'][x]['_coordinateLat']
                    lon = deserialized['$objects'][x]['_coordinateLong'] #lat longs
                    data_list.append((datetime_time, lat, lon, file_found))
                except (KeyError, TypeError):
                    pass
            
    if len(data_list) > 0:
        description = 'Google Maps Cache Routes'
        report = ArtifactHtmlReport('Locations')
        report.start_artifact_report(report_folder, 'Google Maps Cache Routes', description)
        report.add_script()
        data_headers = ('Timestamp','Latitude','Longitude','Source File')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Google Maps Cache Routes'
        tsv(report_folder, data_headers, data_list, tsvname)
    
        kmlactivity = 'Google Maps Cache Routes'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
Example 23
def get_smyfilesStored(files_found, report_folder, seeker):
    
    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    storage,
    path,
    size,
    datetime(date / 1000, "unixepoch"),
    datetime(latest /1000, "unixepoch")
    from FileCache
    where path is not NULL 
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('My Files DB - Stored Files')
        report.start_artifact_report(report_folder, 'My Files DB - Stored Files')
        report.add_script()
        data_headers = ('Storage', 'Path', 'Size', 'Timestamp', 'Latest')
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No My Files DB Stored data available')
    
    db.close()
    return
Example 24
def get_factory_reset(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('factory_reset'):
            continue # Skip all other files
        
        data_list = []
        file_name = 'factory_reset'
        
        modTimesinceEpoc = os.path.getmtime(file_found)

        reset_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(modTimesinceEpoc))
                     
        logdevinfo(f"Factory Reset Timestamp: {reset_time}")
        data_list.append((reset_time, file_name))
                     
        if data_list:
            report = ArtifactHtmlReport('Factory Reset')
            report.start_artifact_report(report_folder, 'Factory Reset')
            report.add_script()
            data_headers = ('Timestamp', 'File Name')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()
            
            tsvname = f'Factory Reset'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = f'Factory Reset'
            timeline(report_folder, tlactivity, data_list, data_headers)
            
        else:
            logfunc('No Factory Reset data available')
Example 25
def get_googlePlaySearches(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(date / 1000, "unixepoch"),
    display1,
    query
    from suggestions
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Google Play Searches')
        report.start_artifact_report(report_folder, 'Google Play Searches')
        report.add_script()
        data_headers = ('Timestamp', 'Display', 'Query')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Google Play Searches data available')

    db.close()
    return
Example 26
def get_kikBplistmeta(files_found, report_folder, seeker):
	data_list = []
	for file_found in files_found:
		file_found = str(file_found)
		isDirectory = os.path.isdir(file_found)
		if isDirectory:
			pass
		else:
			sha1org = sha1scaled = blockhash = appname = layout = allowforward = filesize = filename = thumb = appid = id = ''
			with open(file_found, 'rb') as f:
				plist = biplist.readPlist(f)
				for key,val in plist.items():
					if key == 'id':
						id = val
					elif key == 'hashes':
						for x in val:
							if x['name'] == 'sha1-original':
								sha1org = x.get('value', '')
							if x['name'] == 'sha1-scaled':
								sha1scaled = x.get('value', '')
							if x['name'] == 'blockhash-scaled':
								blockhash = x.get('value', '')
					elif key == 'string':
						for x in val:
							if x['name'] == 'app-name':
								appname = x.get('value', '')
							if x['name'] == 'layout':
								layout = x.get('value', '')
							if x['name'] == 'allow-forward':
								allowforward = x.get('value', '')
							if x['name'] == 'file-size':
								filesize = x.get('value', '')
							if x['name'] == 'file-name':
								filename = x.get('value', '')
					elif key == 'image':
						thumbfilename = id+'.jpg'
						file = open(f'{report_folder}{thumbfilename}', "wb")
						file.write(val[0]['value'])
						file.close()
						thumb = f'<img src="{report_folder}{thumbfilename}"  width="300"></img>'
					elif key == 'app-id':
						appid = val
						
				data_list.append((id, filename, filesize, allowforward, layout, appname, appid, sha1org, sha1scaled, blockhash, thumb  ))
				aggregate = ''
				
	if len(data_list) > 0:
		head_tail = os.path.split(file_found)
		description = 'Metadata from Kik media directory. Source are bplist files.'
		report = ArtifactHtmlReport('Kik Attachments Bplist Metadata')
		report.start_artifact_report(report_folder, 'Kik Media Metadata', description)
		report.add_script()
		data_headers = ('Content ID ', 'Filename', 'File Size', 'Allow Forward', 'Layout','App Name','App ID', 'SHA1 Original','SHA1 Scaled','Blockhash Scaled', 'Internal Thumbnail')
		report.write_artifact_data_table(data_headers, data_list, head_tail[0],html_escape=False)
		report.end_artifact_report()
		
		tsvname = 'Kik Attachments Bplist Metadata'
		tsv(report_folder, data_headers, data_list, tsvname)
	else:
		logfunc('No data on Kik Attachments Bplist Metadata')
Example 27
def get_etc_hosts(files_found, report_folder, seeker, wrap_text):
    data_list = []
    file_found = str(files_found[0])

    with codecs.open(file_found, 'r', 'utf-8-sig') as csvfile:
        for row in csvfile:
            sline = '\t'.join(row.split())
            sline = sline.split('\t')
            if len(sline) < 2:
                continue  # Skip blank or malformed lines
            sline_one = sline[0]
            sline_two = sline[1]
            if (sline_one == '127.0.0.1' and sline_two == 'localhost') or \
                (sline_one == '::1' and sline_two == 'ip6-localhost'):
                pass  # Skipping the defaults, so only anomaly entries are seen
            else:
                data_list.append((sline_one, sline_two))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Etc Hosts')
        report.start_artifact_report(report_folder, f'Etc Hosts')
        report.add_script()
        data_headers = ('IP Address', 'Hostname')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Etc Hosts'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc(f'No etc hosts file available, or nothing significant found.')
Example 28
def get_cmh(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
        datetime(datetaken /1000, "unixepoch") as times,
        latitude,
        longitude,
        address_text,
        uri,
        _data
    FROM location_view
    ''')
    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Samsung CMH')
        report.start_artifact_report(report_folder, f'Geodata')
        report.add_script()
        data_headers = ('Date Taken', 'Latitude', 'Longitude', 'Address', 'URI', 'Data Location')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc(f'No Samsung_CMH_GeoData available')    
    db.close()
    return
Example 29
def get_pSettings(files_found, report_folder, seeker):

    file_found = str(files_found[0])
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    cursor.execute('''
    select 
    name,
    value
    from partner
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Partner Settings')
        report.start_artifact_report(report_folder, 'Partner Settings')
        report.add_script()
        data_headers = ('Name', 'Value')
        data_list = []
        for row in all_rows:
            data_list.append((row[0], row[1]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
    else:
        logfunc('No Partner Settings data available')

    db.close()
    return
Example 30
def get_locationDwifilocB(files_found, report_folder, seeker):
    file_found = str(files_found[0])
    #os.chmod(file_found, 0o0777)
    db = sqlite3.connect(file_found)
    cursor = db.cursor()
    # The following SQL query is taken from https://github.com/mac4n6/APOLLO/blob/master/modules/locationd_cacheencryptedAB_wifilocation.txt
    # from Sarah Edward's APOLLO project, and used under terms of its license found under Licenses/apollo.LICENSE.txt
    cursor.execute("""
	SELECT
	DATETIME(TIMESTAMP + 978307200,'UNIXEPOCH') AS "TIMESTAMP",
	LATITUDE || ", " || LONGITUDE AS "COORDINATES",
	MAC AS "MAC",
	CHANNEL AS "CHANNEL",
	INFOMASK AS "INFOMASK",
	SPEED AS "SPEED",
	COURSE AS "COURSE",
	CONFIDENCE AS "CONFIDENCE",
	SCORE AS "SCORE",
	REACH AS "REACH",
	HORIZONTALACCURACY AS "HORIZONTAL ACCURACY",
	VERTICALACCURACY AS "VERTICAL ACCURACY",
	LATITUDE AS "LATITUDE",
	LONGITUDE AS "LONGITUDE"
	FROM WIFILOCATION
	""")

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    if usageentries > 0:
        for row in all_rows:
            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], row[5], row[6],
                 row[7], row[8], row[9], row[10], row[11], row[12], row[13]))

        description = ''
        report = ArtifactHtmlReport('LocationD WiFi Location')
        report.start_artifact_report(report_folder, 'WiFi Location',
                                     description)
        report.add_script()
        data_headers = ('Timestamp', 'Coordinates', 'MAC', 'Channel',
                        'Infomask', 'Speed', 'Course', 'Confidence', 'Score',
                        'Reach', 'Horizontal Accuracy', 'Vertical Accuracy',
                        'Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'LocationD WiFi Location'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'LocationD WiFi Location'
        timeline(report_folder, tlactivity, data_list, data_headers)

        kmlactivity = 'LocationD WiFi Location'
        kmlgen(report_folder, kmlactivity, data_list, data_headers)
    else:
        logfunc('No data available for LocationD WiFi Location')

    db.close()
    return
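Several of the iOS queries above normalize Apple Core Data / Cocoa timestamps, which count seconds from 2001-01-01 UTC: this example adds the 978307200-second offset explicitly, while Example 10 uses SQLite's 'unixepoch','31 years' modifiers as shorthand for the same shift. A minimal Python equivalent for checking individual values:

import datetime

COCOA_EPOCH_OFFSET = 978307200  # seconds between 1970-01-01 and 2001-01-01 UTC

def cocoa_to_datetime(cocoa_seconds):
    # Core Data / Cocoa timestamps count seconds from 2001-01-01 UTC.
    return datetime.datetime.fromtimestamp(cocoa_seconds + COCOA_EPOCH_OFFSET,
                                           tz=datetime.timezone.utc)

# e.g. a raw TIMESTAMP value read from the WIFILOCATION table
print(cocoa_to_datetime(650000000))   # falls in 2021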