Example #1
    def _ReadStructureFromFileObject(self, file_object, file_offset,
                                     data_type_map):
        """Reads a structure from a file-like object.

    If the data type map has a fixed size this method will read the predefined
    number of bytes from the file-like object. If the data type map has a
    variable size, depending on values in the byte stream, this method will
    continue to read from the file-like object until the data type map can be
    successfully mapped onto the byte stream or until an error occurs.

    Args:
      file_object (dfvfs.FileIO): a file-like object to parse.
      file_offset (int): offset of the structure data relative to the start
          of the file-like object.
      data_type_map (dtfabric.DataTypeMap): data type map of the structure.

    Returns:
      tuple[object, int]: structure values object and data size of
          the structure.

    Raises:
      ParseError: if the structure cannot be read.
      ValueError: if file-like object or data type map is missing.
    """
        context = None
        data = b''
        last_data_size = 0

        data_size = data_type_map.GetByteSize()
        if not data_size:
            data_size = data_type_map.GetSizeHint()

        while data_size != last_data_size:
            read_offset = file_offset + last_data_size
            read_size = data_size - last_data_size
            data_segment = self._ReadData(file_object, read_offset, read_size)

            data = b''.join([data, data_segment])

            try:
                context = dtfabric_data_maps.DataTypeMapContext()
                structure_values_object = data_type_map.MapByteStream(
                    data, context=context)
                return structure_values_object, data_size

            except dtfabric_errors.ByteStreamTooSmallError:
                pass

            except dtfabric_errors.MappingError as exception:
                raise errors.ParseError((
                    'Unable to map {0:s} data at offset: 0x{1:08x} with error: '
                    '{2!s}').format(data_type_map.name, file_offset,
                                    exception))

            last_data_size = data_size
            data_size = data_type_map.GetSizeHint(context=context)

        raise errors.ParseError(
            'Unable to read {0:s} at offset: 0x{1:08x}'.format(
                data_type_map.name, file_offset))
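
A minimal, self-contained trace of the grow-and-retry loop above, using stand-in fake classes rather than the real dtfabric API (the FakeDataTypeMap below and its one-byte length prefix are invented purely for illustration):

import io


class ByteStreamTooSmallError(Exception):
  """Stand-in for dtfabric_errors.ByteStreamTooSmallError."""


class FakeContext(object):
  """Stand-in for dtfabric_data_maps.DataTypeMapContext."""

  def __init__(self):
    self.size = None


class FakeDataTypeMap(object):
  """Fake variable-size map: a 1-byte count followed by count bytes."""

  def GetByteSize(self):
    return None  # No fixed size, so size hints drive the reads.

  def GetSizeHint(self, context=None):
    if context is None or context.size is None:
      return 1  # Enough for the count byte.
    return 1 + context.size  # Count byte plus payload.

  def MapByteStream(self, data, context=None):
    count = data[0]
    context.size = count
    if len(data) < 1 + count:
      raise ByteStreamTooSmallError()
    return bytes(data[1:1 + count])


file_object = io.BytesIO(b'\x03abcXXX')
data_type_map = FakeDataTypeMap()

context = None
data = b''
last_data_size = 0
data_size = data_type_map.GetByteSize() or data_type_map.GetSizeHint()

while data_size != last_data_size:
  file_object.seek(last_data_size)
  data += file_object.read(data_size - last_data_size)
  try:
    context = FakeContext()
    print(data_type_map.MapByteStream(data, context=context))  # b'abc'
    break
  except ByteStreamTooSmallError:
    pass
  last_data_size = data_size
  data_size = data_type_map.GetSizeHint(context=context)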
Example #2
  def _ReadRecordAttributeValueOffset(
      self, file_object, file_offset, number_of_attribute_values):
    """Reads the record attribute value offsets.

    Args:
      file_object (file): file-like object.
      file_offset (int): offset of the record attribute values offsets relative
          to the start of the file.
      number_of_attribute_values (int): number of attribute values.

    Returns:
      keychain_record_attribute_value_offsets: record attribute value offsets.

    Raises:
      ParseError: if the record attribute value offsets cannot be read.
    """
    offsets_data_size = number_of_attribute_values * 4

    offsets_data = file_object.read(offsets_data_size)

    context = dtfabric_data_maps.DataTypeMapContext(values={
        'number_of_attribute_values': number_of_attribute_values})

    data_type_map = self._GetDataTypeMap(
        'keychain_record_attribute_value_offsets')

    try:
      attribute_value_offsets = self._ReadStructureFromByteStream(
          offsets_data, file_offset, data_type_map, context=context)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map record attribute value offsets data at offset: '
          '0x{0:08x} with error: {1!s}').format(file_offset, exception))

    return attribute_value_offsets
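
For comparison, the same fixed-layout read can be done with only the standard library. The element size of 4 bytes comes from the * 4 above; the byte order, however, lives in the dtFabric definition that is not shown here, so the big-endian format string below is an assumption for illustration only:

import io
import struct


def read_attribute_value_offsets(file_object, number_of_attribute_values):
  # Each offset is 4 bytes; byte order assumed big-endian for this sketch.
  offsets_data_size = number_of_attribute_values * 4
  offsets_data = file_object.read(offsets_data_size)
  if len(offsets_data) != offsets_data_size:
    raise IOError('unable to read record attribute value offsets')
  return struct.unpack(
      '>{0:d}I'.format(number_of_attribute_values), offsets_data)


file_object = io.BytesIO(bytes.fromhex('0000001800000030'))
print(read_attribute_value_offsets(file_object, 2))  # (24, 48)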
Example #3
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Safari binary cookie file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): file-like object to be parsed.

    Raises:
      UnableToParseFile: when the file cannot be parsed, this will signal
          the event extractor to apply other parsers.
    """
        file_header_map = self._GetDataTypeMap('binarycookies_file_header')

        try:
            file_header, file_header_data_size = self._ReadStructureFromFileObject(
                file_object, 0, file_header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile(
                'Unable to read file header with error: {0!s}.'.format(
                    exception))

        if file_header.signature != self._SIGNATURE:
            raise errors.UnableToParseFile('Unsupported file signature.')

        file_offset = file_header_data_size

        # TODO: move page sizes array into file header, this will require dtFabric
        # to compare signature as part of data map.
        page_sizes_data_size = file_header.number_of_pages * 4

        page_sizes_data = file_object.read(page_sizes_data_size)

        context = dtfabric_data_maps.DataTypeMapContext(
            values={'binarycookies_file_header': file_header})

        page_sizes_map = self._GetDataTypeMap('binarycookies_page_sizes')

        try:
            page_sizes_array = self._ReadStructureFromByteStream(
                page_sizes_data, file_offset, page_sizes_map, context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError((
                'Unable to map page sizes data at offset: 0x{0:08x} with error: '
                '{1!s}').format(file_offset, exception))

        file_offset += page_sizes_data_size

        for page_number, page_size in enumerate(page_sizes_array):
            if parser_mediator.abort:
                break

            page_data = file_object.read(page_size)
            if len(page_data) != page_size:
                parser_mediator.ProduceExtractionWarning(
                    'unable to read page: {0:d}'.format(page_number))
                break

            self._ParsePage(parser_mediator, file_offset, page_data)

            file_offset += page_size
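
Stripped of the dtFabric plumbing, the page loop above is plain offset bookkeeping: read page_size bytes per page and advance file_offset by the same amount. A toy sketch with made-up page sizes and contents:

import io

page_sizes_array = [4, 6]  # Made-up page sizes for illustration.
file_object = io.BytesIO(b'AAAABBBBBB')

file_offset = 0
for page_number, page_size in enumerate(page_sizes_array):
  page_data = file_object.read(page_size)
  if len(page_data) != page_size:
    print('unable to read page: {0:d}'.format(page_number))
    break
  print('page {0:d} at offset {1:d}: {2!r}'.format(
      page_number, file_offset, page_data))
  file_offset += page_size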
Example #4
    def _ParseMRUListValue(self, registry_key):
        """Parses the MRUList value in a given Registry key.

    Args:
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key that contains
           the MRUList value.

    Returns:
      mrulist_entries: MRUList entries or None if not available.
    """
        mrulist_value = registry_key.GetValueByName('MRUList')

        # The key exists but does not contain a value named "MRUList".
        if not mrulist_value:
            return None

        mrulist_entries_map = self._GetDataTypeMap('mrulist_entries')

        context = dtfabric_data_maps.DataTypeMapContext(
            values={'data_size': len(mrulist_value.data)})

        return self._ReadStructureFromByteStream(mrulist_value.data,
                                                 0,
                                                 mrulist_entries_map,
                                                 context=context)
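
A dtFabric-free sketch of what mrulist_entries decodes to. Treating each entry as a 16-bit little-endian character code is an assumption here, based on the chr() handling of the same entries in Example #20:

import struct


def parse_mrulist(data):
  number_of_entries = len(data) // 2
  entries = struct.unpack('<{0:d}H'.format(number_of_entries), data)
  letters = []
  for entry in entries:
    if entry == 0:  # A 0 entry terminates the list.
      break
    letters.append(chr(entry))
  return letters


print(parse_mrulist(b'c\x00a\x00b\x00\x00\x00'))  # ['c', 'a', 'b']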
Example #5
  def _ReadTransitionTimes64bit(self, file_object, file_header):
    """Reads 64-bit transition times.

    Args:
      file_object (file): file-like object.
      file_header (tzif_file_header): file header.

    Raises:
      ParseError: if the 64-bit transition times cannot be read.
    """
    file_offset = file_object.tell()
    data_type_map = self._GetDataTypeMap('tzif_transition_times_64bit')

    data_size = 8 * file_header.number_of_transition_times

    data = self._ReadData(
        file_object, file_offset, data_size, '64-bit transition times')

    context = dtfabric_data_maps.DataTypeMapContext(values={
        'tzif_file_header': file_header})

    try:
      transition_times = self._ReadStructureFromByteStream(
          data, file_offset, data_type_map, '64-bit transition times',
          context=context)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to parse 64-bit transition times value with error: '
          '{0!s}').format(exception))

    if self._debug:
      self._DebugPrintTransitionTimes(transition_times)
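
The same transition times can also be unpacked directly: TZif stores them as big-endian signed 64-bit integers, 8 bytes each, which is where the 8 * number_of_transition_times above comes from. A self-contained sketch:

import io
import struct


def read_transition_times_64bit(file_object, number_of_transition_times):
  data_size = 8 * number_of_transition_times
  data = file_object.read(data_size)
  if len(data) != data_size:
    raise IOError('unable to read 64-bit transition times')
  return struct.unpack('>{0:d}q'.format(number_of_transition_times), data)


file_object = io.BytesIO(struct.pack('>2q', 0, 2145916800))
print(read_transition_times_64bit(file_object, 2))  # (0, 2145916800)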
Example #6
    def _ParseCachedEntry10(self, value_data, cached_entry_offset):
        """Parses a Windows 10 cached entry.

    Args:
      value_data (bytes): value data.
      cached_entry_offset (int): offset of the first cached entry data
          relative to the start of the value data.

    Returns:
      AppCompatCacheCachedEntry: cached entry.

    Raises:
      ParseError: if the value data could not be parsed.
    """
        try:
            cached_entry = self._ReadStructureFromByteStream(
                value_data[cached_entry_offset:], cached_entry_offset,
                self._cached_entry_data_type_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse cached entry value with error: {0!s}'.format(
                    exception))

        if cached_entry.signature not in (self._CACHED_ENTRY_SIGNATURE_8_0,
                                          self._CACHED_ENTRY_SIGNATURE_8_1):
            raise errors.ParseError('Unsupported cache entry signature')

        cached_entry_data = value_data[cached_entry_offset:]

        data_type_map = self._GetDataTypeMap(
            'appcompatcache_cached_entry_body_10')
        context = dtfabric_data_maps.DataTypeMapContext()

        try:
            cached_entry_body = self._ReadStructureFromByteStream(
                cached_entry_data[12:],
                cached_entry_offset + 12,
                data_type_map,
                context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse cached entry body with error: {0!s}'.format(
                    exception))

        data_offset = cached_entry_offset + context.byte_size
        data_size = cached_entry_body.data_size

        cached_entry_object = AppCompatCacheCachedEntry()
        cached_entry_object.cached_entry_size = (
            12 + cached_entry.cached_entry_data_size)
        cached_entry_object.last_modification_time = (
            cached_entry_body.last_modification_time)
        cached_entry_object.path = cached_entry_body.path

        if data_size > 0:
            cached_entry_object.data = value_data[
                data_offset:data_offset + data_size]

        return cached_entry_object
Example #7
    def _ReadFileHeader(self, file_object):
        """Reads the file header.

    Args:
      file_object (file): file-like object.

    Raises:
      ParseError: if the file header cannot be read.
    """
        data_type_map = self._GetDataTypeMap('binarycookies_file_header')

        file_header, file_header_data_size = self._ReadStructureFromFileObject(
            file_object, 0, data_type_map, 'file header')

        if self._debug:
            self._DebugPrintFileHeader(file_header)

        file_offset = file_header_data_size

        # TODO: move page sizes array into file header, this will require dtFabric
        # to compare signature as part of data map.
        # TODO: check for upper limit.
        page_sizes_data_size = file_header.number_of_pages * 4

        page_sizes_data = file_object.read(page_sizes_data_size)

        if self._debug:
            self._DebugPrintData('Page sizes data', page_sizes_data)

        context = dtfabric_data_maps.DataTypeMapContext(
            values={'binarycookies_file_header': file_header})

        data_type_map = self._GetDataTypeMap('binarycookies_page_sizes')

        try:
            page_sizes_array = self._ReadStructureFromByteStream(
                page_sizes_data,
                file_offset,
                data_type_map,
                'page sizes',
                context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError((
                'Unable to map page sizes data at offset: 0x{0:08x} with error: '
                '{1!s}').format(file_offset, exception))

        self._page_sizes = []
        if file_header.number_of_pages > 0:
            for index, page_size in enumerate(page_sizes_array):
                self._page_sizes.append(page_size)

                if self._debug:
                    description = 'Page: {0:d} size'.format(index)
                    value_string = '{0:d}'.format(page_size)
                    self._DebugPrintValue(description, value_string)

            if self._debug:
                self._DebugPrintText('\n')
Example #8
    def _ReadRecordHeader(self, data, page_data_offset):
        """Reads a record header.

    Args:
      data (bytes): data.
      page_data_offset (int): offset of the page value relative to the start
          of the page data.

    Returns:
      tuple[SpotlightStoreMetadataItem, int]: metadata item and number of
          bytes read.

    Raises:
      ParseError: if the record page cannot be read.
    """
        data_type_map = self._GetDataTypeMap('spotlight_store_db_record')

        context = dtfabric_data_maps.DataTypeMapContext()

        try:
            record = data_type_map.MapByteStream(data, context=context)
        except dtfabric_errors.MappingError as exception:
            raise errors.ParseError(
                ('Unable to map record at offset: 0x{0:08x} with error: '
                 '{1!s}').format(page_data_offset, exception))

        data_offset = context.byte_size

        identifier, bytes_read = self._ReadVariableSizeInteger(
            data[data_offset:])

        data_offset += bytes_read

        flags = data[data_offset]

        data_offset += 1

        value_names = [
            'item_identifier', 'parent_identifier', 'last_update_time'
        ]
        values, bytes_read = self._ReadVariableSizeIntegers(
            data[data_offset:], value_names)

        data_offset += bytes_read

        metadata_item = SpotlightStoreMetadataItem()
        metadata_item.data_size = record.data_size
        metadata_item.flags = flags
        metadata_item.identifier = identifier
        metadata_item.item_identifier = values.get('item_identifier')
        metadata_item.last_update_time = values.get('last_update_time')
        metadata_item.parent_identifier = values.get('parent_identifier')

        return metadata_item, data_offset
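
The _ReadVariableSizeIntegers helper used above maps a list of value names onto consecutive variable-size integers. The real encoding is the Spotlight store one; the one-byte stand-in reader below only illustrates the (value, bytes_read) contract and the offset bookkeeping:

def read_variable_size_integer(data):
  # Stand-in: one-byte "integers"; the real encoding is multi-byte.
  return data[0], 1


def read_variable_size_integers(data, value_names):
  values = {}
  data_offset = 0
  for name in value_names:
    value, bytes_read = read_variable_size_integer(data[data_offset:])
    values[name] = value
    data_offset += bytes_read
  return values, data_offset


value_names = ['item_identifier', 'parent_identifier', 'last_update_time']
values, bytes_read = read_variable_size_integers(b'\x01\x02\x03', value_names)
print(values, bytes_read)
# {'item_identifier': 1, 'parent_identifier': 2, 'last_update_time': 3} 3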
Example #9
    def _ReadRecordAttributeValueOffset(self, file_object, file_offset,
                                        number_of_attribute_values):
        """Reads the record attribute value offsets.

    Args:
      file_object (file): file-like object.
      file_offset (int): offset of the record attribute values offsets relative
          to the start of the file.
      number_of_attribute_values (int): number of attribute values.

    Returns:
      keychain_record_attribute_value_offsets: record attribute value offsets.

    Raises:
      ParseError: if the record attribute value offsets cannot be read.
    """
        offsets_data_size = number_of_attribute_values * 4

        offsets_data = file_object.read(offsets_data_size)

        if self._debug:
            self._DebugPrintData('Attribute value offsets data', offsets_data)

        context = dtfabric_data_maps.DataTypeMapContext(
            values={'number_of_attribute_values': number_of_attribute_values})

        data_type_map = self._GetDataTypeMap(
            'keychain_record_attribute_value_offsets')

        try:
            attribute_value_offsets = self._ReadStructureFromByteStream(
                offsets_data,
                file_offset,
                data_type_map,
                'record attribute value offsets',
                context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError((
                'Unable to map record attribute value offsets data at offset: '
                '0x{0:08x} with error: {1!s}').format(file_offset, exception))

        if self._debug:
            for index, attribute_value_offset in enumerate(
                    attribute_value_offsets):
                description_string = 'Attribute value offset: {0:d}'.format(
                    index)
                value_string = self._FormatIntegerAsHexadecimal8(
                    attribute_value_offset)
                self._DebugPrintValue(description_string, value_string)

            self._DebugPrintText('\n')

        return attribute_value_offsets
Example #10
  def _ReadMetadataAttributeFloat64Value(self, property_type, data):
    """Reads a metadata attribute 64-bit floating-point value.

    Args:
      property_type (int): metadata attribute property type.
      data (bytes): data.

    Returns:
      tuple[object, int]: value and number of bytes read.

    Raises:
      ParseError: if the metadata attribute 64-bit floating-point value cannot
          be read.
    """
    if property_type & 0x02 == 0x00:
      data_size, bytes_read = 8, 0
    else:
      data_size, bytes_read = self._ReadVariableSizeInteger(data)

    data_type_map = self._GetDataTypeMap('array_of_float64')

    context = dtfabric_data_maps.DataTypeMapContext(values={
        'elements_data_size': data_size})

    try:
      array_of_values = data_type_map.MapByteStream(
          data[bytes_read:bytes_read + data_size], context=context)

    except dtfabric_errors.MappingError as exception:
      raise errors.ParseError((
          'Unable to parse array of 64-bit floating-point values with error: '
          '{0!s}').format(exception))

    if bytes_read == 0:
      value = array_of_values[0]
    else:
      value = array_of_values

    bytes_read += data_size

    return value, bytes_read
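
The property type test above selects between two encodings: with bit 0x02 clear the value is a single inline float64 (8 bytes, no size prefix); with it set, a variable-size integer gives the byte size of a float64 array. A sketch of that branch, with a one-byte stand-in for the size integer and little-endian byte order assumed for illustration:

import struct


def read_float64_value(property_type, data):
  if property_type & 0x02 == 0x00:
    data_size, bytes_read = 8, 0  # Single inline float64.
  else:
    data_size, bytes_read = data[0], 1  # Stand-in variable-size integer.

  count = data_size // 8
  values = struct.unpack(
      '<{0:d}d'.format(count), data[bytes_read:bytes_read + data_size])

  # No size prefix means a single value; otherwise return the array.
  value = values[0] if bytes_read == 0 else list(values)
  return value, bytes_read + data_size


print(read_float64_value(0x00, struct.pack('<d', 1.5)))  # (1.5, 8)
print(read_float64_value(0x02, b'\x10' + struct.pack('<2d', 1.0, 2.0)))
# ([1.0, 2.0], 17)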
Example #11
  def _ReadMetadataAttributeStringValue(self, property_type, data):
    """Reads a metadata attribute string value.

    Args:
      property_type (int): metadata attribute property type.
      data (bytes): data.

    Returns:
      tuple[object, int]: value and number of bytes read.

    Raises:
      ParseError: if the metadata attribute string value cannot be read.
    """
    data_size, bytes_read = self._ReadVariableSizeInteger(data)

    data_type_map = self._GetDataTypeMap('array_of_cstring')

    context = dtfabric_data_maps.DataTypeMapContext(values={
        'elements_data_size': data_size})

    try:
      array_of_values = data_type_map.MapByteStream(
          data[bytes_read:bytes_read + data_size], context=context)

    except dtfabric_errors.MappingError as exception:
      raise errors.ParseError(
          'Unable to parse array of string values with error: {0!s}'.format(
              exception))

    if property_type & 0x03 == 0x03:
      value = array_of_values[0]
      if '\x16\x02' in value:
        value = value.split('\x16\x02')[0]
    elif property_type & 0x03 == 0x02:
      value = array_of_values
    else:
      value = array_of_values[0]

    bytes_read += data_size

    return value, bytes_read
Example #12
  def _ReadPropertyPageValues(self, page_header, page_data, property_table):
    """Reads the property page values.

    Args:
      page_header (spotlight_store_db_property_page_header): page header.
      page_data (bytes): page data.
      property_table (dict[int, object]): property table in which to store the
          property page values.

    Raises:
      ParseError: if the property page values cannot be read.
    """
    if page_header.property_table_type == 0x00000011:
      data_type_map = self._GetDataTypeMap(
          'spotlight_store_db_property_value11')

    elif page_header.property_table_type == 0x00000021:
      data_type_map = self._GetDataTypeMap(
          'spotlight_store_db_property_value21')

    else:
      raise errors.ParseError(
          'Unsupported property table type: 0x{0:08x}'.format(
              page_header.property_table_type))

    page_data_offset = 12
    page_data_size = page_header.used_page_size - 20
    page_value_index = 0

    while page_data_offset < page_data_size:
      context = dtfabric_data_maps.DataTypeMapContext()

      try:
        property_value = data_type_map.MapByteStream(
            page_data[page_data_offset:], context=context)
      except dtfabric_errors.MappingError as exception:
        raise errors.ParseError((
            'Unable to map property value data at offset: 0x{0:08x} with '
            'error: {1!s}').format(page_data_offset, exception))

      property_table[property_value.table_index] = property_value

      page_data_offset += context.byte_size
      page_value_index += 1
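
The loop above is a generic consume loop: map a record at the current offset, then advance by however many bytes the map reports via context.byte_size. A stand-in sketch where each record is a 1-byte length followed by that many payload bytes:

class FakeContext(object):
  def __init__(self):
    self.byte_size = None


def map_byte_stream(data, context):
  length = data[0]
  context.byte_size = 1 + length  # Bytes consumed by this record.
  return data[1:1 + length]


page_data = b'\x02ab\x03cde'
page_data_offset = 0
while page_data_offset < len(page_data):
  context = FakeContext()
  value = map_byte_stream(page_data[page_data_offset:], context)
  print(page_data_offset, value)
  page_data_offset += context.byte_size
# 0 b'ab'
# 3 b'cde'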
Example #13
    def Parse(self, value_data):
        """Parses the value data.

    Args:
      value_data (bytes): value data.

    Raises:
      ParseError: if the value data could not be parsed.
    """
        if self._debug:
            self._DebugPrintData('Value data', value_data)

        header, value_data_offset = self._ParseHeader(value_data)

        if header.format_version == 1:
            value_data_offset += 4

        elif header.format_version == 9:
            data_type_map = self._GetDataTypeMap('programscache_header9')
            context = dtfabric_data_maps.DataTypeMapContext()

            try:
                header9 = self._ReadStructureFromByteStream(
                    value_data[value_data_offset:],
                    value_data_offset,
                    data_type_map,
                    'header9',
                    context=context)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError(
                    'Unable to parse header9 value with error: {0!s}'.format(
                        exception))

            value_data_offset += context.byte_size

            if self._debug:
                value_string = '0x{0:08x}'.format(header9.unknown1)
                self._DebugPrintValue('Unknown1', value_string)

        elif header.format_version in (12, 19):
            uuid_object = uuid.UUID(bytes_le=value_data[4:20])
            value_data_offset += 16

            if self._debug:
                value_string = '{0!s}'.format(uuid_object)
                self._DebugPrintValue('Known folder identifier', value_string)

        sentinel = 0
        if header.format_version != 9:
            entry_footer, data_size = self._ParseEntryFooter(
                value_data, value_data_offset)

            value_data_offset += data_size

            sentinel = entry_footer.sentinel

        if self._debug:
            self._DebugPrintText('\n')

        value_data_size = len(value_data)
        while sentinel in (0, 1):
            if value_data_offset >= value_data_size:
                break

            data_type_map = self._GetDataTypeMap('programscache_entry_header')
            context = dtfabric_data_maps.DataTypeMapContext()

            try:
                entry_header = self._ReadStructureFromByteStream(
                    value_data[value_data_offset:],
                    value_data_offset,
                    data_type_map,
                    'entry header',
                    context=context)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError(
                    'Unable to parse entry header value with error: '
                    '{0!s}'.format(exception))

            if self._debug:
                value_string = '0x{0:08x}'.format(value_data_offset)
                self._DebugPrintValue('Entry data offset', value_string)

                self._DebugPrintEntryHeader(entry_header)

            value_data_offset += context.byte_size

            entry_data_size = entry_header.data_size

            shell_item_list = pyfwsi.item_list()
            shell_item_list.copy_from_byte_stream(
                value_data[value_data_offset:])

            for shell_item in iter(shell_item_list.items):
                if self._debug:
                    self._DebugPrintShellItem(shell_item)

            value_data_offset += entry_data_size

            entry_footer, data_size = self._ParseEntryFooter(
                value_data, value_data_offset)

            value_data_offset += data_size

            if self._debug:
                self._DebugPrintText('\n')

            if entry_footer.sentinel == 2 and value_data_offset < value_data_size:
                # TODO: determine the logic to this value.
                while value_data[value_data_offset] != 0x00:
                    value_data_offset += 1
                value_data_offset += 7

                entry_footer, data_size = self._ParseEntryFooter(
                    value_data, value_data_offset)

                value_data_offset += data_size

                if self._debug:
                    self._DebugPrintText('\n')

        if self._debug and value_data_offset < value_data_size:
            value_string = '0x{0:08x}'.format(value_data_offset)
            self._DebugPrintValue('Trailing data offset', value_string)

            self._DebugPrintData('Trailing data:',
                                 value_data[value_data_offset:])
Example #14
    def ParseCachedEntry(self, format_type, value_data, cached_entry_index,
                         cached_entry_offset):
        """Parses a cached entry.

    Args:
      format_type (int): format type.
      value_data (bytes): value data.
      cached_entry_index (int): cached entry index.
      cached_entry_offset (int): offset of the first cached entry data
          relative to the start of the value data.

    Returns:
      AppCompatCacheCachedEntry: cached entry.

    Raises:
      ParseError: if the value data could not be parsed.
    """
        if not self._cached_entry_data_type_map:
            self._cached_entry_data_type_map = self._GetCachedEntryDataTypeMap(
                format_type, value_data, cached_entry_offset)

        if not self._cached_entry_data_type_map:
            raise errors.ParseError(
                'Unable to determine cached entry data type.')

        cached_entry_size = self._cached_entry_data_type_map.GetSizeHint()
        cached_entry_end_offset = cached_entry_offset + cached_entry_size
        cached_entry_data = value_data[
            cached_entry_offset:cached_entry_end_offset]

        if self._debug:
            if format_type not in (self._FORMAT_TYPE_8, self._FORMAT_TYPE_10):
                description = 'Cached entry: {0:d} data'.format(
                    cached_entry_index)
                self._DebugPrintData(description, cached_entry_data)

        try:
            cached_entry = self._ReadStructureFromByteStream(
                cached_entry_data, cached_entry_offset,
                self._cached_entry_data_type_map, 'cached entry')
        except (ValueError, errors.ParseError) as exception:
            if self._debug:
                if format_type in (self._FORMAT_TYPE_8, self._FORMAT_TYPE_10):
                    description = 'Cached entry: {0:d} header data'.format(
                        cached_entry_index)
                    self._DebugPrintData(description, cached_entry_data)

            raise errors.ParseError(
                'Unable to parse cached entry value with error: {0!s}'.format(
                    exception))

        if format_type in (self._FORMAT_TYPE_8, self._FORMAT_TYPE_10):
            if cached_entry.signature not in (
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1):
                if self._debug:
                    description = 'Cached entry: {0:d} header data'.format(
                        cached_entry_index)
                    self._DebugPrintData(description, cached_entry_data)

                raise errors.ParseError('Unsupported cache entry signature')

        cached_entry_object = AppCompatCacheCachedEntry()

        data_offset = 0
        data_size = 0

        if format_type == self._FORMAT_TYPE_XP:
            if self._debug:
                self._DebugPrintCachedEntryXP(cached_entry)

            # TODO: have dtFabric handle string conversion.
            string_size = 0
            for string_index in range(0, 528, 2):
                if (cached_entry.path[string_index] == 0
                        and cached_entry.path[string_index + 1] == 0):
                    break
                string_size += 2

            last_modification_time = cached_entry.last_modification_time
            path = bytearray(
                cached_entry.path[0:string_size]).decode('utf-16-le')

            cached_entry_object.last_update_time = cached_entry.last_update_time

        elif format_type in (self._FORMAT_TYPE_2003, self._FORMAT_TYPE_VISTA,
                             self._FORMAT_TYPE_7):
            if self._debug:
                self._DebugPrintCachedEntry2003(cached_entry)

            last_modification_time = cached_entry.last_modification_time

            if format_type in (self._FORMAT_TYPE_VISTA, self._FORMAT_TYPE_7):
                cached_entry_object.insertion_flags = cached_entry.insertion_flags
                cached_entry_object.shim_flags = cached_entry.shim_flags

            path_size = cached_entry.path_size
            maximum_path_size = cached_entry.maximum_path_size
            path_offset = cached_entry.path_offset

            if path_offset > 0 and path_size > 0:
                path_size += path_offset
                maximum_path_size += path_offset

                if self._debug:
                    self._DebugPrintData(
                        'Path data', value_data[path_offset:maximum_path_size])

                path = value_data[path_offset:path_size].decode('utf-16-le')

                if self._debug:
                    self._DebugPrintValue('Path', path)

            if format_type == self._FORMAT_TYPE_7:
                data_offset = cached_entry.data_offset
                data_size = cached_entry.data_size

        elif format_type in (self._FORMAT_TYPE_8, self._FORMAT_TYPE_10):
            cached_entry_data_size = cached_entry.cached_entry_data_size
            cached_entry_size = 12 + cached_entry_data_size
            cached_entry_end_offset = cached_entry_offset + cached_entry_size

            cached_entry_data = value_data[
                cached_entry_offset:cached_entry_end_offset]

            if self._debug:
                description = 'Cached entry: {0:d} data'.format(
                    cached_entry_index)
                self._DebugPrintData(description, cached_entry_data)

            if format_type == self._FORMAT_TYPE_10:
                data_type_map_name = 'appcompatcache_cached_entry_body_10'
            elif cached_entry.signature == self._CACHED_ENTRY_SIGNATURE_8_0:
                data_type_map_name = 'appcompatcache_cached_entry_body_8_0'
            elif cached_entry.signature == self._CACHED_ENTRY_SIGNATURE_8_1:
                data_type_map_name = 'appcompatcache_cached_entry_body_8_1'

            data_type_map = self._GetDataTypeMap(data_type_map_name)
            context = dtfabric_data_maps.DataTypeMapContext()

            try:
                cached_entry_body = self._ReadStructureFromByteStream(
                    cached_entry_data[12:],
                    cached_entry_offset + 12,
                    data_type_map,
                    'cached entry body',
                    context=context)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError(
                    'Unable to parse cached entry body with error: '
                    '{0!s}'.format(exception))

            if self._debug:
                self._DebugPrintCachedEntry8(cached_entry, cached_entry_body)

            last_modification_time = cached_entry_body.last_modification_time
            path = cached_entry_body.path

            if format_type == self._FORMAT_TYPE_8:
                cached_entry_object.insertion_flags = cached_entry_body.insertion_flags
                cached_entry_object.shim_flags = cached_entry_body.shim_flags

            data_offset = cached_entry_offset + context.byte_size
            data_size = cached_entry_body.data_size

        if self._debug:
            self._DebugPrintText('\n')

        cached_entry_object.cached_entry_size = cached_entry_size
        cached_entry_object.file_size = getattr(cached_entry, 'file_size',
                                                None)
        cached_entry_object.last_modification_time = last_modification_time
        cached_entry_object.path = path

        if data_size > 0:
            cached_entry_object.data = value_data[data_offset:data_offset +
                                                  data_size]

            if self._debug:
                self._DebugPrintData('Data', cached_entry_object.data)

        return cached_entry_object
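
The XP branch above scans a fixed 528-byte buffer of UTF-16 little-endian characters for the 2-byte NUL terminator before decoding (the TODO notes dtFabric should eventually handle this). As a stand-alone function:

def extract_xp_path(path_buffer):
  # Find the byte size of the string up to the first 2-byte NUL.
  string_size = 0
  for string_index in range(0, 528, 2):
    if (path_buffer[string_index] == 0 and
        path_buffer[string_index + 1] == 0):
      break
    string_size += 2
  return bytes(path_buffer[:string_size]).decode('utf-16-le')


path_buffer = 'C:\\WINDOWS\\system32\\cmd.exe'.encode('utf-16-le')
path_buffer += b'\x00' * (528 - len(path_buffer))
print(extract_xp_path(path_buffer))  # C:\WINDOWS\system32\cmd.exe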
Example #15
  def _ReadIndexPageValues(self, page_header, page_data, property_table):
    """Reads the index page values.

    Args:
      page_header (spotlight_store_db_property_page_header): page header.
      page_data (bytes): page data.
      property_table (dict[int, object]): property table in which to store the
          property page values.

    Raises:
      ParseError: if the property page values cannot be read.
    """
    data_type_map = self._GetDataTypeMap('spotlight_store_db_property_value81')
    index_values_data_type_map = self._GetDataTypeMap(
        'spotlight_store_db_index_values')

    page_data_offset = 12
    page_data_size = page_header.used_page_size - 20
    page_value_index = 0

    while page_data_offset < page_data_size:
      try:
        property_value = data_type_map.MapByteStream(
            page_data[page_data_offset:])
      except dtfabric_errors.MappingError as exception:
        raise errors.ParseError((
            'Unable to map property value data at offset: 0x{0:08x} with '
            'error: {1!s}').format(page_data_offset, exception))

      page_value_size = 4

      index_size, bytes_read = self._ReadVariableSizeInteger(
          page_data[page_data_offset + page_value_size:])

      _, padding_size = divmod(index_size, 4)

      page_value_size += bytes_read + padding_size
      index_size -= padding_size

      context = dtfabric_data_maps.DataTypeMapContext(values={
          'index_size': index_size})

      try:
        index_values = index_values_data_type_map.MapByteStream(
            page_data[page_data_offset + page_value_size:], context=context)

      except dtfabric_errors.MappingError as exception:
        raise errors.ParseError((
            'Unable to parse index data at offset: 0x{0:08x} with error: '
            '{1!s}').format(page_data_offset + page_value_size, exception))

      page_value_size += index_size

      values_list = []
      for metadata_value_index in index_values:
        metadata_value = self._metadata_values.get(metadata_value_index, None)
        value_string = getattr(metadata_value, 'value_name', '')
        values_list.append(value_string)

      setattr(property_value, 'values_list', values_list)

      property_table[property_value.table_index] = property_value

      page_data_offset += page_value_size
      page_value_index += 1
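
The divmod call above is alignment handling: the remainder of index_size modulo 4 is treated as padding, so the index data is rounded down to a multiple of 4 bytes and the padding bytes are skipped before mapping. A worked example:

for index_size in (16, 17, 18, 19):
  _, padding_size = divmod(index_size, 4)
  print('index_size: {0:d}, padding: {1:d}, mapped: {2:d}'.format(
      index_size, padding_size, index_size - padding_size))
# index_size: 16, padding: 0, mapped: 16
# index_size: 17, padding: 1, mapped: 16
# index_size: 18, padding: 2, mapped: 16
# index_size: 19, padding: 3, mapped: 16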
Example #16
    def ParseHeader(self, format_type, value_data):
        """Parses the header.

    Args:
      format_type (int): format type.
      value_data (bytes): value data.

    Returns:
      AppCompatCacheHeader: header.

    Raises:
      ParseError: if the value data could not be parsed.
    """
        data_type_map_name = self._HEADER_DATA_TYPE_MAP_NAMES.get(
            format_type, None)
        if not data_type_map_name:
            raise errors.ParseError(
                'Unsupported format type: {0:d}'.format(format_type))

        data_type_map = self._GetDataTypeMap(data_type_map_name)
        context = dtfabric_data_maps.DataTypeMapContext()

        try:
            header = self._ReadStructureFromByteStream(value_data,
                                                       0,
                                                       data_type_map,
                                                       'header',
                                                       context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse header value with error: {0!s}'.format(
                    exception))

        header_data_size = context.byte_size
        if format_type == self._FORMAT_TYPE_10:
            header_data_size = header.signature

        cache_header = AppCompatCacheHeader()
        cache_header.header_size = header_data_size
        cache_header.number_of_cached_entries = getattr(
            header, 'number_of_cached_entries', None)

        if self._debug:
            self._DebugPrintHeader(format_type, header)

        if format_type == self._FORMAT_TYPE_XP:
            if self._debug:
                self._DebugPrintText('LRU entries:')

            data_offset = 16
            number_of_lru_entries = header.number_of_lru_entries
            if 0 <= number_of_lru_entries <= 96:
                data_type_map = self._GetDataTypeMap('uint32le')

                for lru_entry_index in range(number_of_lru_entries):
                    try:
                        lru_entry = self._ReadStructureFromByteStream(
                            value_data[data_offset:data_offset + 4],
                            data_offset, data_type_map, 'LRU entry')
                    except (ValueError, errors.ParseError) as exception:
                        raise errors.ParseError(
                            'Unable to parse LRU entry value with error: '
                            '{0!s}'.format(exception))

                    data_offset += 4

                    if self._debug:
                        description = 'LRU entry: {0:d}'.format(
                            lru_entry_index)
                        value_string = '{0:d} (offset: 0x{1:08x})'.format(
                            lru_entry, 400 + (lru_entry * 552))
                        self._DebugPrintValue(description, value_string)

                if self._debug:
                    self._DebugPrintText('\n')

            if self._debug:
                self._DebugPrintData('Unknown data',
                                     value_data[data_offset:400])

        self._cached_entry_data_type_map = None

        return cache_header
Example #17
  def _ParseMRUListExEntryValue(
      self, parser_mediator, registry_key, entry_index, entry_number,
      codepage='cp1252', **kwargs):
    """Parses the MRUListEx entry value.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key that contains
           the MRUListEx value.
      entry_index (int): MRUListEx entry index.
      entry_number (int): entry number.
      codepage (Optional[str]): extended ASCII string codepage.

    Returns:
      str: MRUList entry value.
    """
    value_string = ''

    value = registry_key.GetValueByName('{0:d}'.format(entry_number))
    if value is None:
      parser_mediator.ProduceExtractionWarning(
          'missing MRUListEx value: {0:d} in key: {1:s}.'.format(
              entry_number, registry_key.path))

    elif not value.DataIsBinaryData():
      logger.debug((
          '[{0:s}] Non-binary MRUListEx entry value: {1:d} in key: '
          '{2:s}.').format(self.NAME, entry_number, registry_key.path))

    elif value.data:
      utf16le_string_map = self._GetDataTypeMap('utf16le_string')

      context = dtfabric_data_maps.DataTypeMapContext()

      try:
        path = self._ReadStructureFromByteStream(
            value.data, 0, utf16le_string_map, context=context)
      except (ValueError, errors.ParseError) as exception:
        parser_mediator.ProduceExtractionWarning((
            'unable to parse MRUListEx entry value: {0:d} with error: '
            '{1!s}').format(entry_number, exception))
        return value_string

      path = path.rstrip('\x00')

      shell_item_list_data = value.data[context.byte_size:]

      if not shell_item_list_data:
        parser_mediator.ProduceExtractionWarning((
            'missing shell item in MRUListEx value: {0:d} in key: '
            '{1:s}.').format(entry_number, registry_key.path))
        value_string = 'Path: {0:s}'.format(path)

      else:
        shell_items_parser = shell_items.ShellItemsParser(registry_key.path)
        shell_items_parser.ParseByteStream(
            parser_mediator, shell_item_list_data, codepage=codepage)

        shell_item_path = shell_items_parser.CopyToPath() or 'N/A'
        value_string = 'Path: {0:s}, Shell item path: {1:s}'.format(
            path, shell_item_path)

    return value_string
Example #18
    def _ReadChangeLogEntry(self, file_object):
        """Reads a change log entry.

    Args:
      file_object (file): file-like object.

    Returns:
      ChangeLogEntry: a change log entry.

    Raises:
      ParseError: if the change log entry cannot be read.
    """
        file_offset = file_object.tell()
        data_type_map = self._GetDataTypeMap('rp_change_log_entry')

        change_log_entry_record, data_size = self._ReadStructureFromFileObject(
            file_object, file_offset, data_type_map, 'change log entry record')

        if self._debug:
            self._DebugPrintChangeLogEntryRecord(change_log_entry_record)

        if change_log_entry_record.record_type != 1:
            raise errors.ParseError('Unsupported record type: {0:d}'.format(
                change_log_entry_record.record_type))

        signature = change_log_entry_record.signature
        if signature != self._RECORD_SIGNATURE:
            raise errors.ParseError('Unsupported change.log file signature')

        # TODO: refactor to use size hints
        record_size = (change_log_entry_record.record_size - data_size)
        record_data = file_object.read(record_size)
        file_offset += data_size

        if self._debug:
            self._DebugPrintData('Record data', record_data)

        context = dtfabric_data_maps.DataTypeMapContext(
            values={'rp_change_log_entry': change_log_entry_record})

        data_type_map = self._GetDataTypeMap('rp_change_log_entry2')

        try:
            change_log_entry_record2 = self._ReadStructureFromByteStream(
                record_data,
                file_offset,
                data_type_map,
                'change log entry record',
                context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse change log entry record with error: '
                '{0!s}'.format(exception))

        if self._debug:
            self._DebugPrintValue('Process name',
                                  change_log_entry_record2.process_name[:-1])

            self._DebugPrintText('\n')

        change_log_entry = ChangeLogEntry()
        change_log_entry.entry_type = change_log_entry_record.entry_type
        change_log_entry.entry_flags = change_log_entry_record.entry_flags
        change_log_entry.file_attribute_flags = (
            change_log_entry_record.file_attribute_flags)
        change_log_entry.sequence_number = change_log_entry_record.sequence_number
        change_log_entry.process_name = (
            change_log_entry_record2.process_name[:-1])

        sub_record_data_offset = context.byte_size
        sub_record_data_size = record_size - 4
        if self._debug:
            value_string = '{0:d}'.format(sub_record_data_offset)
            self._DebugPrintValue('Sub record data offset', value_string)

            value_string = '{0:d}'.format(sub_record_data_size -
                                          sub_record_data_offset)
            self._DebugPrintValue('Sub record data size', value_string)

            if sub_record_data_offset < sub_record_data_size:
                self._DebugPrintText('\n')

        while sub_record_data_offset < sub_record_data_size:
            read_size = self._ReadRecord(record_data, sub_record_data_offset)
            if read_size == 0:
                break
            sub_record_data_offset += read_size

        data_type_map = self._GetDataTypeMap('uint32le')

        try:
            copy_of_record_size = self._ReadStructureFromByteStream(
                record_data[-4:], sub_record_data_offset, data_type_map,
                'copy of record size')
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse copy of record size with error: {0!s}'.format(
                    exception))

        if change_log_entry_record.record_size != copy_of_record_size:
            raise errors.ParseError(
                'Record size mismatch ({0:d} != {1:d})'.format(
                    change_log_entry_record.record_size, copy_of_record_size))

        if self._debug:
            value_string = '{0:d}'.format(copy_of_record_size)
            self._DebugPrintValue('Copy of record size', value_string)

            self._DebugPrintText('\n')

        return change_log_entry
Example #19
    def _ProcessKeyWithMRUListExValue(self, registry_key):
        """Processes a Windows Registry key that contains a MRUListEx value.

    Args:
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.

    Returns:
      bool: True if a Most Recently Used (MRU) key was found, False if not.

    Raises:
      ParseError: if the MRUListEx entries could not be parsed.
    """
        # TODO: determine what trailing data is in:
        # HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\
        # ComDlg32\CIDSizeMRU

        registry_value = registry_key.GetValueByName('MRUListEx')

        data_type_map = self._GetDataTypeMap('mrulistex_entries')

        context = dtfabric_data_maps.DataTypeMapContext(
            values={'data_size': len(registry_value.data)})

        try:
            mrulistex_entries = self._ReadStructureFromByteStream(
                registry_value.data,
                0,
                data_type_map,
                'MRUListEx entries',
                context=context)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse MRUListEx entries with error: {0!s}'.format(
                    exception))

        mrulistex = set([])
        recovered_mrulistex = set([])
        is_recovered = False
        for entry_number in mrulistex_entries:
            if entry_number == 0:
                is_recovered = True

            if is_recovered:
                recovered_mrulistex.add(entry_number)
            else:
                mrulistex.add(entry_number)

        result = False
        for registry_value in registry_key.GetValues():
            if registry_value.name in ('MRUListEx', 'NodeSlot', 'NodeSlots'):
                continue

            if self._debug:
                description = 'Key: {0:s}\nValue: {1:s}'.format(
                    registry_key.path, registry_value.name)
                self._output_writer.WriteText(description)

            if self._InKeyPaths(registry_key.path,
                                self._SHELL_ITEM_MRU_KEY_PATHS):
                self._ProcessMRUEntryShellItem(registry_key.path,
                                               registry_value.name,
                                               registry_value.data)

            elif self._InKeyPaths(registry_key.path,
                                  self._SHELL_ITEM_LIST_MRU_KEY_PATHS):
                self._ProcessMRUEntryShellItemList(registry_key.path,
                                                   registry_value.name,
                                                   registry_value.data)

            elif self._InKeyPaths(registry_key.path,
                                  self._STRING_AND_SHELL_ITEM_MRU_KEY_PATHS):
                self._ProcessMRUEntryStringAndShellItem(
                    registry_key.path, registry_value.name,
                    registry_value.data)

            elif self._InKeyPaths(
                    registry_key.path,
                    self._STRING_AND_SHELL_ITEM_LIST_MRU_KEY_PATHS):
                self._ProcessMRUEntryStringAndShellItemList(
                    registry_key.path, registry_value.name,
                    registry_value.data)

            else:
                self._ProcessMRUEntryString(registry_key.path,
                                            registry_value.name,
                                            registry_value.data)

            result = True

        return result
Example #20
  def _ProcessKeyWithMRUListValue(self, registry_key):
    """Processes a Windows Registry key that contains a MRUList value.

    Args:
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.

    Returns:
      bool: True if a Most Recently Used (MRU) key was found, False if not.

    Raises:
      ParseError: if the MRUList value could not be parsed.
    """
    registry_value = registry_key.GetValueByName('MRUList')

    data_type_map = self._GetDataTypeMap('mrulist_entries')

    context = dtfabric_data_maps.DataTypeMapContext(values={
        'data_size': len(registry_value.data)})

    try:
      mrulist_entries = self._ReadStructureFromByteStream(
          registry_value.data, 0, data_type_map, 'MRUList entries',
          context=context)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError(
          'Unable to parse MRUList entries with error: {0!s}'.format(exception))

    mrulist = set([])
    recovered_mrulist = set([])
    is_recovered = False
    for entry_letter in mrulist_entries:
      if entry_letter == 0:
        is_recovered = True

      entry_letter = chr(entry_letter)

      if is_recovered:
        recovered_mrulist.add(entry_letter)
      else:
        mrulist.add(entry_letter)

    result = False
    for registry_value in registry_key.GetValues():
      if registry_value.name in ('MRUList', 'NodeSlot', 'NodeSlots'):
        continue

      if self._debug:
        description = 'Key: {0:s}\nValue: {1:s}'.format(
            registry_key.path, registry_value.name)
        self._output_writer.WriteText(description)

      if self._InKeyPaths(registry_key.path, self._SHELL_ITEM_MRU_KEY_PATHS):
        self._ProcessMRUEntryShellItem(
            registry_key.path, registry_value.name, registry_value.data)

      elif self._InKeyPaths(
          registry_key.path, self._SHELL_ITEM_LIST_MRU_KEY_PATHS):
        self._ProcessMRUEntryShellItemList(
            registry_key.path, registry_value.name, registry_value.data)

      elif self._InKeyPaths(
          registry_key.path, self._STRING_AND_SHELL_ITEM_MRU_KEY_PATHS):
        self._ProcessMRUEntryStringAndShellItem(
            registry_key.path, registry_value.name, registry_value.data)

      elif self._InKeyPaths(
          registry_key.path, self._STRING_AND_SHELL_ITEM_LIST_MRU_KEY_PATHS):
        self._ProcessMRUEntryStringAndShellItemList(
            registry_key.path, registry_value.name, registry_value.data)

      else:
        self._ProcessMRUEntryString(
            registry_key.path, registry_value.name, registry_value.data)

      result = True

    return result
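
The recovered-entry split in the loop above works like this: entries before the first 0 terminator are the live MRU list, and everything from the terminator on is treated as recovered (previously deleted) entries. A trace with made-up entry values; note that the terminator itself ends up in the recovered set as chr(0), mirroring the code above:

mrulist_entries = [0x63, 0x61, 0x00, 0x62]  # Made-up: 'c', 'a', 0, 'b'.

mrulist = set()
recovered_mrulist = set()
is_recovered = False
for entry_letter in mrulist_entries:
  if entry_letter == 0:
    is_recovered = True

  entry_letter = chr(entry_letter)

  if is_recovered:
    recovered_mrulist.add(entry_letter)
  else:
    mrulist.add(entry_letter)

print(sorted(mrulist))             # ['a', 'c']
print(sorted(recovered_mrulist))   # ['\x00', 'b']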