Example #1
def _parse_contacts(database, result_path):
    """Parse contacts2.db

    Args:
        database (SQLite3): target SQLite3 database.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('Contacts not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    data['title'] = 'contact'
    header = ('name', 'number')
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append((row[0], row[1]))

        data['data'] = data_list
    else:
        logger.warning('NO Contacts found!')

    return data
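Note that every SQLite parser in these examples executes a `query` variable that is defined earlier in the original module and omitted from this excerpt. A minimal sketch of how Example #1 might be driven, assuming a hypothetical SQL statement and an already-extracted contacts2.db (the query text and paths below are illustrative, not the project's actual values):

import sqlite3

# Hypothetical placeholder; the real SQL lives elsewhere in the original module.
query = 'SELECT display_name, number FROM view_data'

database = sqlite3.connect('contacts2.db')
contacts = _parse_contacts(database, result_path='./results')
print(contacts['number_of_data'], 'contact rows parsed')
database.close()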
Example #2
def _parse_sim_info(database, result_path):
    """Parse SIM Information.

    Args:
        database (SQLite3): target SQLite3 database.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('SIM Information not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    data['title'] = 'sim_info'
    header = ('icc_id', 'sim_id', 'display_name', 'carrier_name')
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append(
                (row['icc_id'], row['sim_id'], row['display_name'],
                 row['carrier_name']))

        data['data'] = data_list
    else:
        logger.warning('NO SIM Information found!')

    return data
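Example #2 reads result columns by name (row['icc_id'], ...), which only works when the connection yields mapping-style rows. A minimal usage sketch, assuming the caller owns the connection (the database path is illustrative):

import sqlite3

database = sqlite3.connect('telephony.db')  # illustrative path
database.row_factory = sqlite3.Row          # enables row['icc_id']-style access
sim_info = _parse_sim_info(database, result_path='./results')
database.close()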
Example #3
def _parse_user_dict(database, result_path):
    """Parse User Dictionary.

    Args:
        database (SQLite3): target SQLite3 database.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('User Dictionary not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    data['title'] = 'user_dict'
    header = ('word', 'frequency', 'locale', 'app_id', 'shortcut')
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        data['data'] = data_list
    else:
        logger.warning('NO User Dictionary found!')

    return data
Example #4
def _parse_recent_files(database, result_path):
    """Parse myfiles.db.

    Args:
        database (SQLite3): target SQLite3 database.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('Recent Files not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    header = ('name', 'size', 'timestamp', 'data', 'ext', 'source',
              'description', 'recent_timestamp')
    data['title'] = 'recent_files'
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append(
                (row[0], row[1],
                 datetime.datetime.fromtimestamp(
                     row[2] / 1000,
                     datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                 row[3], row[4], row[5], row[6],
                 datetime.datetime.fromtimestamp(
                     row[7] / 1000,
                     datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')))

        data['data'] = data_list
    else:
        logger.warning('NO Recent Files found!')

    return data
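Examples #4 and #6 through #8 repeat the same inline conversion from millisecond Unix epochs to ISO 8601 UTC strings. As a sketch only (not part of the original module), the pattern could be factored into a helper:

import datetime

def _ms_to_iso8601(timestamp_ms):
    """Convert a millisecond Unix epoch value to an ISO 8601 UTC string."""
    return datetime.datetime.fromtimestamp(
        timestamp_ms / 1000,
        datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

# _ms_to_iso8601(1609459200000) -> '2021-01-01T00:00:00.000000Z'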
Example #5
def process(target_files, func, result_path):
    """Android basic apps parsing process

    Args:
        target_files (list): target files.
        func (str): function name.
        result_path (str): result path.
    """
    try:
        if not os.path.exists(result_path):
            os.mkdir(result_path)

    except Exception as exception:
        logger.error(
            'cannot create result directory at path {0:s}: {1!s}'.format(
                result_path, exception))

    method = globals()[func]
    results = method(target_files, result_path)

    return results
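process resolves the parser by name with globals()[func] and calls it as method(target_files, result_path), so the name passed in must be a module-level function with that two-argument signature; the underscore-prefixed parsers above take an open database connection instead and are presumably wrapped by such functions in the full module. A hedged sketch of what such a wrapper and dispatch call could look like (the wrapper name and paths are assumptions, not taken from the original source):

import sqlite3

def parse_contacts(target_files, result_path):
    """Hypothetical wrapper with the (target_files, result_path) signature process() expects."""
    results = []
    for target_file in target_files:
        database = sqlite3.connect(target_file)
        try:
            results.append(_parse_contacts(database, result_path))
        finally:
            database.close()
    return results

results = process(['contacts2.db'], 'parse_contacts', './results')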
Example #6
def _parse_file_cache(database, result_path):
    """Parse FileCache.db.

    Args:
        database (SQLite3): target SQLite3 database.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('File cache not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    header = ('storage', 'path', 'size', 'timestamp', 'latest')
    data['title'] = 'file_cache'
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append(
                (row[0], row[1], row[2],
                 datetime.datetime.fromtimestamp(
                     row[3] / 1000,
                     datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                 datetime.datetime.fromtimestamp(
                     row[4] / 1000,
                     datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')))

        data['data'] = data_list
    else:
        logger.warning('NO File cache found!')

    return data
Example #7
def _parse_accounts_de(database, uid, result_path):
    """Parse accounts_de.db.

    Args:
        database (SQLite3): target SQLite3 database.
        uid (str): user id.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('Accounts not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    header = ('name', 'type', 'last_password_entry')
    data['title'] = f'accounts_de_{uid}'
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append(
                (row[0], row[1],
                 datetime.datetime.fromtimestamp(
                     row[2] / 1000,
                     datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')))

        data['data'] = data_list
    else:
        logger.warning('NO Accounts found!')

    return data
Example #8
def _parse_call_logs(database, result_path):
    """Parse Call Logs.

    Args:
        database (SQLite3): target SQLite3 database.
        result_path (str): result path.
    """
    cursor = database.cursor()
    try:
        cursor.execute(query)
    except sqlite3.Error as exception:
        logger.error('Call Logs not found! {0!s}'.format(exception))

    results = cursor.fetchall()
    num_of_results = len(results)

    data = {}
    data['title'] = 'call_logs'
    header = ('phone_account_address', 'partner', 'call_date', 'type', 'duration_in_secs', 'partner_location',
              'country_iso', 'data', 'mime_type', 'transcription', 'deleted')
    data['number_of_data_headers'] = len(header)
    data['number_of_data'] = num_of_results
    data['data_header'] = header
    data_list = []
    if num_of_results > 0:
        for row in results:
            data_list.append(
                (row[0], row[1],
                 datetime.datetime.fromtimestamp(
                     row[2] / 1000,
                     datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                 row[3], str(row[4]), row[5], row[6], row[7], row[8], row[9],
                 str(row[10])))

        data['data'] = data_list
    else:
        logger.warning('NO Call Logs found!')

    return data
Example #9
def _parse_usagestats(directory, uid, result_path):
    """Parse usagestats from /system/usagestats.

    Args:
        directory (str): target directory path.
        uid (str): user id.
        result_path (str): result path.
    """
    data = {}
    data['title'] = f'usagestats_{uid}'
    header = ('usage_type', 'last_time_active', 'time_active_in_msecs', 'time_active_in_secs',
              'last_time_service_used', 'last_time_visible', 'total_time_visible', 'app_launch_count',
              'package', 'types', 'class', 'source', 'all_attributes')
    data['number_of_data_headers'] = len(header)
    data['data_header'] = header
    data_list = []

    for file in glob.iglob(os.path.join(directory, '**'), recursive=True):
        if os.path.isfile(file):
            source = None
            filename = os.path.basename(file)

            if os.path.dirname(file).endswith('daily'):
                source = 'daily'
            elif os.path.dirname(file).endswith('weekly'):
                source = 'weekly'
            elif os.path.dirname(file).endswith('monthly'):
                source = 'monthly'
            elif os.path.dirname(file).endswith('yearly'):
                source = 'yearly'

            try:
                filename_int = int(filename)
            except ValueError:
                # Skip files whose name is not a millisecond timestamp;
                # filename_int is needed below as the base time offset.
                logger.error('Invalid File Name: {0:s}'.format(filename))
                continue

            try:
                tree = xml.etree.ElementTree.parse(file)
                root = tree.getroot()
                logger.info('processing: {0:s}'.format(file))
                for elem in root:
                    if elem.tag == 'packages':
                        usagestat = UsageStats()
                        usagestat.source = source
                        usagestat.usage_type = elem.tag
                        for subelem in elem:
                            usagestat.all_attributes = json.dumps(subelem.attrib)
                            last_time_active = int(subelem.attrib['lastTimeActive'])
                            if last_time_active < 0:
                                usagestat.last_time_active = abs(last_time_active)
                            else:
                                usagestat.last_time_active = filename_int + last_time_active

                            usagestat.last_time_active = datetime.datetime.fromtimestamp(usagestat.last_time_active / 1000,
                                 datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

                            usagestat.package = subelem.attrib['package']
                            usagestat.time_active_in_msecs = subelem.attrib['timeActive']
                            usagestat.time_active_in_secs = int(usagestat.time_active_in_msecs) / 1000
                            usagestat.app_launch_count = subelem.attrib.get('appLaunchCount', None)

                            data_list.append(
                                (usagestat.usage_type, usagestat.last_time_active, usagestat.time_active_in_msecs,
                                 usagestat.time_active_in_secs, usagestat.last_time_service_used,
                                 usagestat.last_time_visible,
                                 usagestat.total_time_visible, usagestat.app_launch_count, usagestat.package,
                                 usagestat.types, usagestat.cls, usagestat.source, usagestat.all_attributes))

                    elif elem.tag == 'configurations':
                        usagestat = UsageStats()
                        usagestat.source = source
                        usagestat.usage_type = elem.tag
                        for subelem in elem:
                            usagestat.all_attributes = json.dumps(subelem.attrib)
                            last_time_active = int(subelem.attrib['lastTimeActive'])
                            if last_time_active < 0:
                                usagestat.last_time_active = abs(last_time_active)
                            else:
                                usagestat.last_time_active = filename_int + last_time_active

                            usagestat.last_time_active = datetime.datetime.fromtimestamp(usagestat.last_time_active / 1000,
                                 datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

                            usagestat.time_active_in_msecs = subelem.attrib['timeActive']
                            usagestat.time_active_in_secs = int(usagestat.time_active_in_msecs) / 1000

                            data_list.append(
                                (usagestat.usage_type, usagestat.last_time_active, usagestat.time_active_in_msecs,
                                 usagestat.time_active_in_secs, usagestat.last_time_service_used,
                                 usagestat.last_time_visible,
                                 usagestat.total_time_visible, usagestat.app_launch_count, usagestat.package,
                                 usagestat.types, usagestat.cls, usagestat.source, usagestat.all_attributes))

                    elif elem.tag == 'event-log':
                        usagestat = UsageStats()
                        usagestat.source = source
                        usagestat.usage_type = elem.tag
                        for subelem in elem:
                            usagestat.all_attributes = json.dumps(subelem.attrib)
                            time = int(subelem.attrib['time'])
                            if time < 0:
                                usagestat.last_time_active = abs(time)
                            else:
                                usagestat.last_time_active = filename_int + time

                            usagestat.last_time_active = datetime.datetime.fromtimestamp(usagestat.last_time_active / 1000,
                                 datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

                            usagestat.package = subelem.attrib['package']
                            usagestat.types = str(EventType(int(subelem.attrib['type'])))
                            usagestat.cls = subelem.attrib.get('class', None)

                            data_list.append((usagestat.usage_type, usagestat.last_time_active, usagestat.time_active_in_msecs,
                                    usagestat.time_active_in_secs, usagestat.last_time_service_used, usagestat.last_time_visible,
                                    usagestat.total_time_visible, usagestat.app_launch_count, usagestat.package,
                                    usagestat.types, usagestat.cls, usagestat.source, usagestat.all_attributes))


            except xml.etree.ElementTree.ParseError:
                # Perhaps an Android Q protobuf file
                try:
                    stats = _ReadUsageStatsPbFile(file)
                except Exception:
                    logger.error('Parse Error: Non XML file and Non Protobuf file: {0:s}'.format(file))
                    continue

                if stats:
                    for stat in stats.packages:
                        usagestat = UsageStats()
                        usagestat.source = source
                        usagestat.usage_type = 'packages'
                        if stat.HasField('last_time_active_ms'):
                            last_time_active = stat.last_time_active_ms
                            if last_time_active < 0:
                                usagestat.last_time_active = abs(last_time_active)
                            else:
                                usagestat.last_time_active = filename_int + last_time_active

                            usagestat.last_time_active = datetime.datetime.fromtimestamp(usagestat.last_time_active / 1000,
                                datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

                        if stat.HasField('total_time_active_ms'):
                            usagestat.time_active_in_msecs = abs(stat.total_time_active_ms)

                        usagestat.package = stats.stringpool.strings[stat.package_index - 1]

                        if stat.HasField('app_launch_count'):
                            usagestat.app_launch_count = abs(stat.app_launch_count)

                        data_list.append(
                            (usagestat.usage_type, usagestat.last_time_active, usagestat.time_active_in_msecs,
                             usagestat.time_active_in_secs, usagestat.last_time_service_used,
                             usagestat.last_time_visible,
                             usagestat.total_time_visible, usagestat.app_launch_count, usagestat.package,
                             usagestat.types, usagestat.cls, usagestat.source, usagestat.all_attributes))

                    for stat in stats.configurations:
                        usagestat = UsageStats()
                        usagestat.source = source
                        usagestat.usage_type = 'configurations'
                        if stat.HasField('last_time_active_ms'):
                            last_time_active = stat.last_time_active_ms
                            if last_time_active < 0:
                                usagestat.last_time_active = abs(last_time_active)
                            else:
                                usagestat.last_time_active = filename_int + last_time_active

                            usagestat.last_time_active = datetime.datetime.fromtimestamp(usagestat.last_time_active / 1000,
                                datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

                        if stat.HasField('total_time_active_ms'):
                            usagestat.time_active_in_msecs = abs(stat.total_time_active_ms)

                        usagestat.all_attributes = str(stat.config)

                        data_list.append(
                            (usagestat.usage_type, usagestat.last_time_active, usagestat.time_active_in_msecs,
                             usagestat.time_active_in_secs, usagestat.last_time_service_used,
                             usagestat.last_time_visible,
                             usagestat.total_time_visible, usagestat.app_launch_count, usagestat.package,
                             usagestat.types, usagestat.cls, usagestat.source, usagestat.all_attributes))

                    for stat in stats.event_log:
                        usagestat = UsageStats()
                        usagestat.source = source
                        usagestat.usage_type = 'event-log'
                        if stat.HasField('time_ms'):
                            last_time_active = stat.time_ms
                            if last_time_active < 0:
                                usagestat.last_time_active = abs(last_time_active)
                            else:
                                usagestat.last_time_active = filename_int + last_time_active

                            usagestat.last_time_active = datetime.datetime.fromtimestamp(usagestat.last_time_active / 1000,
                                datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

                        if stat.HasField('package_index'):
                            usagestat.package = stats.stringpool.strings[stat.package_index - 1]
                        if stat.HasField('class_index'):
                            usagestat.cls = stats.stringpool.strings[stat.class_index - 1]
                        if stat.HasField('type'):
                            usagestat.types = str(EventType(stat.type)) if stat.type <= 18 else str(stat.type)

                        data_list.append(
                            (usagestat.usage_type, usagestat.last_time_active, usagestat.time_active_in_msecs,
                             usagestat.time_active_in_secs, usagestat.last_time_service_used,
                             usagestat.last_time_visible,
                             usagestat.total_time_visible, usagestat.app_launch_count, usagestat.package,
                             usagestat.types, usagestat.cls, usagestat.source, usagestat.all_attributes))

                    continue

    data['number_of_data'] = len(data_list)
    data['data'] = data_list

    return data
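Example #9 depends on a UsageStats container and an EventType enum defined elsewhere in the original module. The following is a minimal sketch of the container, inferred only from the attributes used above; the real class (and the enum, whose values mirror Android's UsageEvents event types) may differ:

class UsageStats:
    """Sketch of the per-entry record assumed by _parse_usagestats."""

    def __init__(self):
        self.source = None                  # daily / weekly / monthly / yearly
        self.usage_type = None              # packages / configurations / event-log
        self.last_time_active = None
        self.time_active_in_msecs = None
        self.time_active_in_secs = None
        self.last_time_service_used = None
        self.last_time_visible = None
        self.total_time_visible = None
        self.app_launch_count = None
        self.package = None
        self.types = None
        self.cls = None
        self.all_attributes = None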