Example 1
def ProcessStoreDb(input_file_path, input_file, output_path, output_params, items_to_compare, file_name_prefix, limit_output_types=True, no_path_file=False):
    '''Main spotlight store.db processing function.
       file_name_prefix is used to name the excel sheet or sqlite table, and as the prefix for the paths file name.
       limit_output_types=True restricts output to SQLITE only; otherwise all output options are honored. This speeds
       up processing, as writing to excel is very slow. The user's output preferences are still honored if the db is small.
       items_to_compare is a dictionary of previously seen items; only items not already present in it are written.
    '''
    items = {}
    global writer
    
    output_path_full_paths = os.path.join(output_path, file_name_prefix + '_fullpaths.csv')
    output_path_data = os.path.join(output_path, file_name_prefix + '_data.txt')

    log.info('Processing ' + input_file_path)
    try:
        if not os.path.exists(output_path):
            log.info("Creating output folder for spotlight at {}".format(output_path))
            os.makedirs(output_path)
        
        with open(output_path_data, 'wb') as output_file:
            output_paths_file = None
            store = spotlight_parser.SpotlightStore(input_file)
            store.ReadBlocksInSeq()
            ## create db, write table with fields.
            out_params = CopyOutputParams(output_params)
            if limit_output_types and (store.block0.item_count > 500): # Large db, limit to sqlite output
                log.warning('Since the spotlight database is large, only Sqlite output will be written!')
                out_params.write_xlsx = False
                out_params.write_csv = False
                if not out_params.write_sql: # sql is not enabled, must initialize database!
                    if not EnableSqliteDb(output_path, out_params, file_name_prefix): return None
            try:
                log.debug("Trying to write extracted store data for {}".format(file_name_prefix))
                data_type_info = Get_Column_Info(store)
                writer = DataWriter(out_params, "Spotlight-" + file_name_prefix, data_type_info, input_file_path)
            except (sqlite3.Error, ValueError, IOError, OSError) as ex:
                log.exception("Failed to initialize data writer")
                return None

            store.ParseMetadataBlocks(output_file, items, items_to_compare, ProcessStoreItems)
            writer.FinishWrites()
            
            # Write Paths db as csv
            if not no_path_file:
                path_type_info = [('ID', DataType.INTEGER), ('FullPath', DataType.TEXT)]
                fullpath_writer = DataWriter(out_params, "Spotlight-" + file_name_prefix + '-paths', path_type_info, input_file_path)
                with open(output_path_full_paths, 'wb') as output_paths_file:
                    log.info('Inodes and Path information being written to {}'.format(output_path_full_paths))
                    output_paths_file.write(b"Inode_Number\tFull_Path\r\n")
                    if items_to_compare: 
                        items_to_compare.update(items) # This updates items_to_compare ! 
                        WriteFullPaths(items, items_to_compare, output_paths_file, fullpath_writer)
                    else:
                        WriteFullPaths(items, items, output_paths_file, fullpath_writer)
                    if out_params.write_sql: 
                        CreateViewAndIndexes(data_type_info, fullpath_writer.sql_writer, file_name_prefix)
                fullpath_writer.FinishWrites()                
            return items
    except Exception as ex:
        log.exception('Exception processing spotlight store db file')
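
A minimal usage sketch for the function above (assumptions: output_params is the caller's output-params object from the surrounding framework, store.db has already been exported to disk, and the paths, prefix and variable names below are illustrative, not values from the source):

store_db_path = '/evidence/store.db'      # hypothetical path to an extracted store.db
output_folder = '/output/Spotlight'       # hypothetical output folder, created if missing

with open(store_db_path, 'rb') as f:
    items = ProcessStoreDb(
        store_db_path,        # input_file_path: used for logging and as the source tag
        f,                    # input_file: binary file object handed to spotlight_parser.SpotlightStore
        output_folder,        # output_path: created if it does not exist
        output_params,        # output_params: copied, possibly narrowed to sqlite-only for large dbs
        None,                 # items_to_compare: falsy -> every parsed item is written
        'store',              # file_name_prefix: names the table/sheet and the _fullpaths.csv / _data.txt files
        limit_output_types=True,
        no_path_file=False)

# The returned dict can be passed back as items_to_compare on a later call, so that
# only items not already present in it are written.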
Example 2
def ProcessStoreDb(input_file_path,
                   input_file,
                   output_path,
                   output_params,
                   items_to_compare,
                   file_name_prefix,
                   limit_output_types=True,
                   no_path_file=False,
                   user=""):
    '''Main spotlight store.db processing function.
       file_name_prefix is used to name the excel sheet or sqlite table, and as the prefix for the paths file name.
       limit_output_types=True restricts output to SQLITE only; otherwise all output options are honored. This speeds
       up processing, as writing to excel is very slow. The user's output preferences are still honored if the db is small.
       items_to_compare is a dictionary of previously seen items; only items not already present in it are written.
    '''
    items = {}
    global writer

    output_path_full_paths = os.path.join(output_path,
                                          file_name_prefix + '_fullpaths.csv')
    output_path_data = os.path.join(output_path,
                                    file_name_prefix + '_data.txt')

    log.info('Processing ' + input_file_path)
    try:
        if not os.path.exists(output_path):
            log.info("Creating output folder for spotlight at {}".format(
                output_path))
            os.makedirs(output_path)

        with open(output_path_data, 'wb') as output_file:
            output_paths_file = None
            store = spotlight_parser.SpotlightStore(input_file)
            if store.is_ios_store:  # The properties, categories and indexes must be stored in external files
                input_folder = os.path.dirname(input_file_path)
                try:
                    prop_map_data, prop_map_offsets, prop_map_header = GetMapDataOffsetHeader(
                        input_folder, 1, user)
                    cat_map_data, cat_map_offsets, cat_map_header = GetMapDataOffsetHeader(
                        input_folder, 2, user)
                    idx_1_map_data, idx_1_map_offsets, idx_1_map_header = GetMapDataOffsetHeader(
                        input_folder, 4, user)
                    idx_2_map_data, idx_2_map_offsets, idx_2_map_header = GetMapDataOffsetHeader(
                        input_folder, 5, user)

                    store.ParsePropertiesFromFileData(prop_map_data,
                                                      prop_map_offsets,
                                                      prop_map_header)
                    store.ParseCategoriesFromFileData(cat_map_data,
                                                      cat_map_offsets,
                                                      cat_map_header)
                    store.ParseIndexesFromFileData(idx_1_map_data,
                                                   idx_1_map_offsets,
                                                   idx_1_map_header,
                                                   store.indexes_1)
                    store.ParseIndexesFromFileData(idx_2_map_data,
                                                   idx_2_map_offsets,
                                                   idx_2_map_header,
                                                   store.indexes_2,
                                                   has_extra_byte=True)

                    store.ReadPageIndexesAndOtherDefinitions(True)
                except Exception:
                    log.exception(
                        'Failed to find or process one or more dependency files. Cannot proceed!'
                    )
                    return None
            ##
            else:
                store.ReadPageIndexesAndOtherDefinitions()
            ## create db, write table with fields.
            out_params = CopyOutputParams(output_params)
            if limit_output_types and (store.block0.item_count > 500):  # Large db, limit to sqlite output
                log.warning(
                    'Since the spotlight database is large, only Sqlite output will be written!'
                )
                out_params.write_xlsx = False
                out_params.write_csv = False
                if not out_params.write_sql:  # sql is not enabled, must initialize database!
                    if not EnableSqliteDb(output_path, out_params,
                                          file_name_prefix):
                        return None
            try:
                log.debug("Trying to write extracted store data for {}".format(
                    file_name_prefix))
                data_type_info = Get_Column_Info(store)
                writer = DataWriter(out_params,
                                    "Spotlight-" + file_name_prefix,
                                    data_type_info, input_file_path)
            except (sqlite3.Error, ValueError, OSError) as ex:
                log.exception("Failed to initialize data writer")
                return None

            total_items_parsed = store.ParseMetadataBlocks(
                output_file, items, items_to_compare, ProcessStoreItems)
            writer.FinishWrites()

            if total_items_parsed == 0:
                log.debug('Nothing was parsed from this file!')
            # create Views in ios/user style db
            if store.is_ios_store and (total_items_parsed > 0):
                create_views_for_ios_db(writer.sql_writer.filepath,
                                        writer.sql_writer.table_name)

            # Write Paths db as csv
            if (not store.is_ios_store) and (not no_path_file):
                path_type_info = [('ID', DataType.INTEGER),
                                  ('FullPath', DataType.TEXT)]
                fullpath_writer = DataWriter(
                    out_params, "Spotlight-" + file_name_prefix + '-paths',
                    path_type_info, input_file_path)
                with open(output_path_full_paths, 'wb') as output_paths_file:
                    log.info('Inodes and Path information being written to {}'.
                             format(output_path_full_paths))
                    output_paths_file.write(b"Inode_Number\tFull_Path\r\n")
                    if items_to_compare:
                        items_to_compare.update(
                            items)  # This updates items_to_compare !
                        WriteFullPaths(items, items_to_compare,
                                       output_paths_file, fullpath_writer)
                    else:
                        WriteFullPaths(items, items, output_paths_file,
                                       fullpath_writer)
                    if out_params.write_sql:
                        CreateViewAndIndexes(data_type_info,
                                             fullpath_writer.sql_writer,
                                             file_name_prefix)
                fullpath_writer.FinishWrites()
            return items
    except Exception as ex:
        log.exception('Exception processing spotlight store db file')
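
A similar sketch for the second variant against an iOS-style store, where the property, category and index maps are expected in sibling files and located through GetMapDataOffsetHeader(input_folder, n, user); the path, prefix, user name and previously_seen_items dict below are illustrative assumptions, not values from the source:

ios_store_path = '/evidence/ios/store.db'    # hypothetical path; the map files sit next to it
with open(ios_store_path, 'rb') as f:
    new_items = ProcessStoreDb(
        ios_store_path,
        f,
        '/output/Spotlight-iOS',
        output_params,
        previously_seen_items,   # hypothetical dict from an earlier run; only unseen items are written
        'store_ios',
        limit_output_types=True,
        no_path_file=True,       # the fullpaths CSV is skipped for iOS-style stores in any case
        user='mobile')           # passed through to GetMapDataOffsetHeader when locating the map files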