Ejemplo n.º 1
0
class Wdigest_x64(Wdigest, Mimikatz_x64):
  """Reads wdigest credential list entries from a 64-bit lsass process.

  Combines the credential-list handling of Wdigest with the 64-bit process
  access helpers of Mimikatz_x64.
  """

  # Layout of one 64-bit wdigest credential list entry. The previous/next
  # fields link the entries into a doubly linked list; user/domain/password
  # are stored as (length, max length, pointer) triples, with the password
  # data pointed to by password_encrypted_ptr stored encrypted.
  WDIGEST_LIST_ENTRY = construct.Struct('WdigestListEntry',
      construct.ULInt64('previous'),
      construct.ULInt64('next'),
      construct.ULInt32('usage_count'),
      construct.ULInt32('align1'),
      construct.ULInt64('this_entry'),
      construct.ULInt64('luid'),
      construct.ULInt64('flag'),
      construct.ULInt16('user_len'),
      construct.ULInt16('user_max_len'),
      construct.ULInt32('align2'),
      construct.ULInt64('user_string_ptr'),
      construct.ULInt16('domain_len'),
      construct.ULInt16('domain_max_len'),
      construct.ULInt32('align3'),
      construct.ULInt64('domain_string_ptr'),
      construct.ULInt16('password_len'),
      construct.ULInt16('password_max_len'),
      construct.ULInt32('align4'),
      construct.ULInt64('password_encrypted_ptr'))
  
  def __init__(self, lsass_task, credentials_obj):
    """Initializes the extractor.

    Args:
      lsass_task: the lsass process (task) object to read from.
      credentials_obj: object that collects the extracted credentials.
    """
    Mimikatz_x64.__init__(self, lsass_task)
    Wdigest.__init__(self, credentials_obj)
Ejemplo n.º 2
0
class WinRecycleBinParser(interface.FileObjectParser):
    """Parses the Windows $Recycle.Bin $I files."""

    NAME = u'recycle_bin'
    DESCRIPTION = u'Parser for Windows $Recycle.Bin $I files.'

    # Define a list of all structs needed.
    # Struct read from:
    # https://code.google.com/p/rifiuti2/source/browse/trunk/src/rifiuti-vista.h
    RECORD_STRUCT = construct.Struct(u'record', construct.ULInt64(u'filesize'),
                                     construct.ULInt64(u'filetime'))

    MAGIC_STRUCT = construct.ULInt64(u'magic')

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows RecycleBin $Ixx file-like object.

        Args:
          parser_mediator: A parser mediator object (instance of ParserMediator).
          file_object: A file-like object.

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        try:
            magic_header = self.MAGIC_STRUCT.parse_stream(file_object)
        except (construct.FieldError, IOError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse $Ixxx file with error: {0:s}'.format(
                    exception))

        if magic_header != 1:
            raise errors.UnableToParseFile(
                u'Not an $Ixxx file, wrong magic header.')

        # We may have to rely on filenames since this header is very generic.
        # TODO: Rethink this and potentially make a better test.
        filename = parser_mediator.GetFilename()
        if not filename.startswith(u'$I'):
            raise errors.UnableToParseFile(
                u'Not an $Ixxx file, wrong magic header.') if False else None
        if not filename.startswith(u'$I'):
            raise errors.UnableToParseFile(
                u'Not an $Ixxx file, filename doesn\'t start with $I.')

        # Guard the record and filename reads as well: on a truncated file
        # these would otherwise raise a raw construct/IO error out of the
        # parser instead of UnableToParseFile (consistent with the magic
        # read above).
        try:
            record = self.RECORD_STRUCT.parse_stream(file_object)
            filename_utf = binary.ReadUTF16Stream(file_object)
        except (construct.FieldError, IOError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse $Ixxx file with error: {0:s}'.format(
                    exception))

        filetime = record.get(u'filetime', 0)
        # TODO: handle missing timestamp.
        event_object = WinRecycleEvent(filetime, u'', filename_utf, record, 0)
        parser_mediator.ProduceEvent(event_object)
Ejemplo n.º 3
0
class WinRecycleBinParser(interface.BaseParser):
    """Parses the Windows $Recycle.Bin $I files."""

    NAME = 'recycle_bin'
    DESCRIPTION = u'Parser for Windows $Recycle.Bin $I files.'

    # Define a list of all structs needed.
    # Struct read from:
    # https://code.google.com/p/rifiuti2/source/browse/trunk/src/rifiuti-vista.h
    RECORD_STRUCT = construct.Struct('record', construct.ULInt64('filesize'),
                                     construct.ULInt64('filetime'))

    MAGIC_STRUCT = construct.ULInt64('magic')

    def Parse(self, parser_context, file_entry):
        """Extract entries from a Windows RecycleBin $Ixx file.

        Args:
          parser_context: A parser context object (instance of ParserContext).
          file_entry: A file entry object (instance of dfvfs.FileEntry).

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        file_object = file_entry.GetFileObject()
        try:
            try:
                magic_header = self.MAGIC_STRUCT.parse_stream(file_object)
            except (construct.FieldError, IOError) as exception:
                raise errors.UnableToParseFile(
                    u'Unable to parse $Ixxx file with error: {0:s}'.format(
                        exception))

            # Use != instead of the original "is not 1": identity comparison
            # against an integer literal depends on interpreter int caching
            # and is not a value comparison.
            if magic_header != 1:
                raise errors.UnableToParseFile(
                    u'Not an $Ixxx file, wrong magic header.')

            # We may have to rely on filenames since this header is very
            # generic.
            # TODO: Rethink this and potentially make a better test.
            base_filename = utils.GetBaseName(file_entry.name)
            if not base_filename.startswith('$I'):
                raise errors.UnableToParseFile(
                    u'Not an $Ixxx file, filename doesn\'t start with $I.')

            record = self.RECORD_STRUCT.parse_stream(file_object)
            filename_utf = binary.ReadUtf16Stream(file_object)
        finally:
            # Close the file object on every path; the original leaked it
            # whenever UnableToParseFile was raised before the close.
            file_object.close()

        event_object = WinRecycleEvent(u'', filename_utf, record, 0)
        parser_context.ProduceEvent(event_object,
                                    parser_name=self.NAME,
                                    file_entry=file_entry)
Ejemplo n.º 4
0
class LsaDecryptor_Vista_x64(LsaDecryptor_x64):
  """Class for Vista x64."""

  # Byte pattern used to locate the LSA protected-memory key material.
  SIGNATURE = '\x83\x64\x24\x30\x00\x44\x8b\x4c\x24\x48\x48\x8b\x0d'
  # Offsets, relative to the signature match, of the pointers to the IV and
  # the AES/DES key handles. (Stray trailing semicolons and the tab in the
  # SIGNATURE assignment removed.)
  PTR_IV_OFFSET = 63
  PTR_AES_KEY_OFFSET = 25
  PTR_DES_KEY_OFFSET = -69

  # Layout of the KIWI_BCRYPT_HANDLE_KEY structure.
  BCRYPT_HANDLE_KEY = construct.Struct('KIWI_BCRYPT_HANDLE_KEY',
      construct.ULInt32('size'),
      construct.ULInt32('tag'),  # Tag 'UUUR', 0x55555552.
      construct.ULInt64('ptr_void_algorithm'),
      construct.ULInt64('ptr_kiwi_bcrypt_key'),
      construct.ULInt64('ptr_unknown'))

  def __init__(self, lsass_task):
    """Initializes the decryptor for the given lsass task."""
    LsaDecryptor_x64.__init__(self, lsass_task)
Ejemplo n.º 5
0
class LsaDecryptor_Win7_x64(LsaDecryptor_x64):
  """Class for Windows 7 x64."""

  # MIMIKATZ x64: BYTE PTRN_WNO8_LsaInitializeProtectedMemory_KEY[]
  SIGNATURE = '\x83\x64\x24\x30\x00\x44\x8b\x4c\x24\x48\x48\x8b\x0d'
  # Offsets, relative to the signature match, of the pointers to the IV and
  # the AES/DES key handles. (Stray trailing semicolons and the tab in the
  # SIGNATURE assignment removed.)
  PTR_IV_OFFSET = 59
  PTR_AES_KEY_OFFSET = 25
  PTR_DES_KEY_OFFSET = -61

  # Layout of the KIWI_BCRYPT_HANDLE_KEY structure.
  BCRYPT_HANDLE_KEY = construct.Struct('KIWI_BCRYPT_HANDLE_KEY',
      construct.ULInt32('size'),
      construct.ULInt32('tag'),  # Tag 'UUUR', 0x55555552.
      construct.ULInt64('ptr_void_algorithm'),
      construct.ULInt64('ptr_kiwi_bcrypt_key'),
      construct.ULInt64('ptr_unknown'))

  def __init__(self, lsass_task):
    """Initializes the decryptor for the given lsass task."""
    LsaDecryptor_x64.__init__(self, lsass_task)
Ejemplo n.º 6
0
def decode_itempos(itempos):
    """Decodes a single itempos and returns the extracted information.

    Args:
      itempos: raw itempos bytes from a shellbag ItemPos entry.

    Returns:
      A list: [itempos size, file size, modified timestamp, short (DOS)
      filename, created timestamp, access timestamp, unicode filename].
      The created/access timestamps and the unicode filename are empty
      strings when the extension version does not provide them.
    """
    itempos_io = StringIO.StringIO(itempos)
    itempos_struct = construct.Struct("itempos",
                                      construct.ULInt16("itempos_size"),
                                      construct.Padding(2),
                                      construct.ULInt32("filesize"),
                                      construct.Bytes("dos_date", 2),
                                      construct.Bytes("dos_time", 2),
                                      construct.ULInt16("file_attr"),
                                      construct.CString("filename")
                                      )
    parse_res = itempos_struct.parse_stream(itempos_io)
    # Entries are 16-bit aligned: skip the pad byte after an odd-length name.
    if itempos_io.pos % 2 == 1:
        itempos_io.read(1)
    ext_struct = construct.Struct("ext",
                                  construct.ULInt16("ext_size"),
                                  construct.ULInt16("ext_version")
                                  )
    parse_ext = ext_struct.parse_stream(itempos_io)

    # BUG FIX: the original referenced parse_res2 unconditionally at the end,
    # but parse_res2 was only bound inside the ext_version >= 0x3 branch, so
    # older entries raised NameError. Default the derived timestamps instead
    # and compute them inside the branch.
    timestamp_created = ""
    timestamp_access = ""
    if parse_ext["ext_version"] >= 0x3:
        itempos2_struct = construct.Struct("itempos2",
                                           construct.Padding(2),  # 0004
                                           construct.Padding(2),  # BEEF
                                           construct.Bytes("creation_dos_date", 2),
                                           construct.Bytes("creation_dos_time", 2),
                                           construct.Bytes("access_dos_date", 2),
                                           construct.Bytes("access_dos_time", 2),
                                           construct.Padding(4)
                                           )
        parse_res2 = itempos2_struct.parse_stream(itempos_io)
        timestamp_created = dosdate(
            parse_res2["creation_dos_date"],
            parse_res2["creation_dos_time"]).strftime("%d/%m/%Y %H:%M:%S")
        timestamp_access = dosdate(
            parse_res2["access_dos_date"],
            parse_res2["access_dos_time"]).strftime("%d/%m/%Y %H:%M:%S")
    unicode_filename = ""
    if parse_ext["ext_version"] >= 0x7:
        itempos3_struct = construct.Struct("itempos3",
                                           construct.ULInt64("file_ref"),
                                           construct.Padding(8),
                                           construct.Padding(2),
                                           construct.Padding(4)
                                           )
        parse_res3 = itempos3_struct.parse_stream(itempos_io)
        unicode_filename = itempos_io.read().decode("utf16")
        # NOTE(review): this condition looks inverted relative to its own
        # comment (strips only when the name does NOT end in NUL) -- preserved
        # as-is; confirm against real shellbag data before changing.
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char
    elif parse_ext["ext_version"] >= 0x3:
        unicode_filename = itempos_io.read().decode("utf16")
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char

    timestamp_modified = dosdate(parse_res["dos_date"], parse_res["dos_time"]).strftime("%d/%m/%Y %H:%M:%S")

    return [unicode(parse_res["itempos_size"]), unicode(parse_res["filesize"]), timestamp_modified,
            parse_res["filename"], timestamp_created, timestamp_access, unicode_filename]
Ejemplo n.º 7
0
class ShutdownPlugin(interface.KeyPlugin):
    """Windows Registry plugin for parsing the last shutdown time of a system."""

    NAME = u'windows_shutdown'
    DESCRIPTION = u'Parser for ShutdownTime Registry value.'

    REG_KEYS = [u'\\{current_control_set}\\Control\\Windows']
    REG_TYPE = u'SYSTEM'
    FILETIME_STRUCT = construct.ULInt64(u'filetime_timestamp')

    def GetEntries(
        self, parser_mediator, key=None, registry_file_type=None,
        codepage=u'cp1252', **unused_kwargs):
        """Collect ShutdownTime value under Windows and produce an event object.

        Args:
          parser_mediator: A parser mediator object (instance of ParserMediator).
          key: Optional Registry key (instance of winreg.WinRegKey).
              The default is None.
          registry_file_type: Optional string containing the Windows Registry
              file type, e.g. NTUSER, SOFTWARE. The default is None.
          codepage: Optional extended ASCII string codepage. The default is
              cp1252.
        """
        shutdown_value = key.GetValue(u'ShutdownTime')
        if not shutdown_value:
            return

        text_dict = {u'Description': shutdown_value.name}

        # The value data holds a 64-bit FILETIME timestamp.
        try:
            filetime = self.FILETIME_STRUCT.parse(shutdown_value.data)
        except construct.FieldError as exception:
            parser_mediator.ProduceParseError(
                u'Unable to extract shutdown timestamp with error: {0:s}'.
                format(exception))
            return

        event_object = windows_events.WindowsRegistryEvent(
            timelib.Timestamp.FromFiletime(filetime), key.path, text_dict,
            usage=eventdata.EventTimestamp.LAST_SHUTDOWN, offset=key.offset,
            registry_file_type=registry_file_type,
            source_append=u'Shutdown Entry')
        parser_mediator.ProduceEvent(event_object)
Ejemplo n.º 8
0
class ShutdownPlugin(interface.WindowsRegistryPlugin):
    """Windows Registry plugin for parsing the last shutdown time of a system."""

    NAME = u'windows_shutdown'
    DESCRIPTION = u'Parser for ShutdownTime Registry value.'

    FILTERS = frozenset([
        interface.WindowsRegistryKeyPathFilter(
            u'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\Windows')
    ])

    _UINT64_STRUCT = construct.ULInt64(u'value')

    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a ShutdownTime Windows Registry value.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        shutdown_value = registry_key.GetValueByName(u'ShutdownTime')
        if not shutdown_value:
            return

        # Directly parse the Windows Registry value data in case it is defined
        # as binary data.
        timestamp = None
        try:
            timestamp = self._UINT64_STRUCT.parse(shutdown_value.data)
        except construct.FieldError as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to determine shutdown timestamp with error: {0:s}'.
                format(exception))

        if timestamp:
            date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        else:
            # Fall back to a semantic "Not set" placeholder time.
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')

        event_data = ShutdownWindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = shutdown_value.offset
        event_data.value_name = shutdown_value.name

        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.LAST_SHUTDOWN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Ejemplo n.º 9
0
class ShutdownPlugin(interface.WindowsRegistryPlugin):
    """Windows Registry plugin for parsing the last shutdown time of a system."""

    NAME = u'windows_shutdown'
    DESCRIPTION = u'Parser for ShutdownTime Registry value.'

    FILTERS = frozenset([
        interface.WindowsRegistryKeyPathFilter(
            u'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\Windows')
    ])

    _UINT64_STRUCT = construct.ULInt64(u'value')

    _SOURCE_APPEND = u'Shutdown Entry'

    def GetEntries(self, parser_mediator, registry_key, **kwargs):
        """Collect ShutdownTime value under Windows and produce an event object.

        Args:
          parser_mediator: A parser mediator object (instance of
              ParserMediator).
          registry_key: A Windows Registry key (instance of
              dfwinreg.WinRegistryKey).
        """
        shutdown_value = registry_key.GetValueByName(u'ShutdownTime')
        if not shutdown_value:
            return

        # BUG FIX: the original passed shutdown_value.GetDataAsObject() --
        # an already-decoded object, formatted with {0:d} -- to construct's
        # parse(), which expects the raw value data bytes; that raises
        # TypeError, which the except construct.FieldError clause does not
        # catch. Parse the raw value data directly, as the sibling shutdown
        # plugins do.
        try:
            filetime = self._UINT64_STRUCT.parse(shutdown_value.data)
        except construct.FieldError as exception:
            parser_mediator.ProduceParseError(
                u'Unable to extract shutdown timestamp with error: '
                u'{0:s}'.format(exception))
            return

        values_dict = {u'Description': shutdown_value.name}

        event_object = windows_events.WindowsRegistryEvent(
            filetime,
            registry_key.path,
            values_dict,
            offset=registry_key.offset,
            source_append=self._SOURCE_APPEND,
            usage=eventdata.EventTimestamp.LAST_SHUTDOWN)
        parser_mediator.ProduceEvent(event_object)
Ejemplo n.º 10
0
class TaskCacheCollector(collector.WindowsVolumeCollector):
    """Class that defines a Task Cache collector.

    Attributes:
      key_found (bool): True if the Windows Registry key was found.
    """

    _DYNAMIC_INFO_STRUCT = construct.Struct(
        u'dynamic_info_record', construct.ULInt32(u'unknown1'),
        construct.ULInt64(u'last_registered_time'),
        construct.ULInt64(u'launch_time'), construct.ULInt32(u'unknown2'),
        construct.ULInt32(u'unknown3'))

    _DYNAMIC_INFO_STRUCT_SIZE = _DYNAMIC_INFO_STRUCT.sizeof()

    _DYNAMIC_INFO2_STRUCT = construct.Struct(
        u'dynamic_info2_record', construct.ULInt32(u'unknown1'),
        construct.ULInt64(u'last_registered_time'),
        construct.ULInt64(u'launch_time'), construct.ULInt32(u'unknown2'),
        construct.ULInt32(u'unknown3'), construct.ULInt64(u'unknown_time'))

    _DYNAMIC_INFO2_STRUCT_SIZE = _DYNAMIC_INFO2_STRUCT.sizeof()

    _TASK_CACHE_KEY_PATH = (
        u'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion\\'
        u'Schedule\\TaskCache')

    def __init__(self, debug=False, mediator=None):
        """Initializes the collector object.

        Args:
          debug (Optional[bool]): True if debug information should be printed.
          mediator (Optional[dfvfs.VolumeScannerMediator]): a volume scanner
              mediator.
        """
        super(TaskCacheCollector, self).__init__(mediator=mediator)
        self._debug = debug
        registry_file_reader = collector.CollectorRegistryFileReader(self)
        self._registry = registry.WinRegistry(
            registry_file_reader=registry_file_reader)

        self.key_found = False

    def _FiletimeToDatetime(self, filetime):
        """Converts a FILETIME timestamp into a datetime object.

        Args:
          filetime (int): FILETIME timestamp, in 100-nanosecond intervals
              since January 1, 1601.

        Returns:
          datetime.datetime: corresponding date and time.
        """
        # FILETIME counts 100-nanosecond intervals; // 10 yields microseconds.
        return (datetime.datetime(1601, 1, 1) +
                datetime.timedelta(microseconds=filetime // 10))

    def _GetIdValue(self, registry_key):
        """Retrieves the Id value from Task Cache Tree key.

        Args:
          registry_key (dfwinreg.WinRegistryKey): Windows Registry key.

        Yields:
          tuple[dfwinreg.WinRegistryKey, dfwinreg.WinRegistryValue]: Windows
              Registry key and value.
        """
        id_value = registry_key.GetValueByName(u'Id')
        if id_value:
            yield registry_key, id_value

        # Recurse depth-first through the sub keys.
        for sub_key in registry_key.GetSubkeys():
            for value_key, id_value in self._GetIdValue(sub_key):
                yield value_key, id_value

    def Collect(self, output_writer):
        """Collects the Task Cache.

        Args:
          output_writer (OutputWriter): output writer.
        """
        dynamic_info_size_error_reported = False

        self.key_found = False

        task_cache_key = self._registry.GetKeyByPath(self._TASK_CACHE_KEY_PATH)
        if not task_cache_key:
            return

        tasks_key = task_cache_key.GetSubkeyByName(u'Tasks')
        tree_key = task_cache_key.GetSubkeyByName(u'Tree')

        if not tasks_key or not tree_key:
            return

        self.key_found = True

        # First map task GUIDs (from the Tree sub keys' Id values) to their
        # task names.
        task_guids = {}
        for sub_key in tree_key.GetSubkeys():
            for value_key, id_value in self._GetIdValue(sub_key):
                # TODO: improve this check to a regex.
                # The GUID is in the form {%GUID%} and stored an UTF-16
                # little-endian string and should be 78 bytes in size.

                id_value_data_size = len(id_value.data)
                if id_value_data_size != 78:
                    # BUG FIX: the original logged the literal format string
                    # u'Unsupported Id value data size: {0:s}.' without ever
                    # calling format(); fill in the actual size (an int, so
                    # use the d format specifier).
                    logging.error(
                        u'Unsupported Id value data size: {0:d}.'.format(
                            id_value_data_size))
                    continue

                guid_string = id_value.GetDataAsObject()
                task_guids[guid_string] = value_key.name

        for sub_key in tasks_key.GetSubkeys():
            dynamic_info_value = sub_key.GetValueByName(u'DynamicInfo')
            if not dynamic_info_value:
                continue

            dynamic_info_value_data = dynamic_info_value.data
            dynamic_info_value_data_size = len(dynamic_info_value_data)

            if self._debug:
                print(u'DynamicInfo value data:')
                print(hexdump.Hexdump(dynamic_info_value_data))

            # Two DynamicInfo record layouts exist, distinguished by size.
            if dynamic_info_value_data_size == self._DYNAMIC_INFO_STRUCT_SIZE:
                dynamic_info_struct = self._DYNAMIC_INFO_STRUCT.parse(
                    dynamic_info_value_data)

            elif dynamic_info_value_data_size == self._DYNAMIC_INFO2_STRUCT_SIZE:
                dynamic_info_struct = self._DYNAMIC_INFO2_STRUCT.parse(
                    dynamic_info_value_data)

            else:
                # Report an unsupported size only once to avoid log spam.
                if not dynamic_info_size_error_reported:
                    logging.error(
                        u'Unsupported DynamicInfo value data size: {0:d}.'.
                        format(dynamic_info_value_data_size))
                    dynamic_info_size_error_reported = True
                continue

            last_registered_time = dynamic_info_struct.get(
                u'last_registered_time')
            launch_time = dynamic_info_struct.get(u'launch_time')
            unknown_time = dynamic_info_struct.get(u'unknown_time')

            if self._debug:
                print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    dynamic_info_struct.get(u'unknown1')))

                date_string = self._FiletimeToDatetime(last_registered_time)
                print(u'Last registered time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.
                      format(date_string, last_registered_time))

                date_string = self._FiletimeToDatetime(launch_time)
                print(u'Launch time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
                    date_string, launch_time))

                print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    dynamic_info_struct.get(u'unknown2')))
                print(u'Unknown3\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    dynamic_info_struct.get(u'unknown3')))

                if dynamic_info_value_data_size == self._DYNAMIC_INFO2_STRUCT_SIZE:
                    date_string = self._FiletimeToDatetime(unknown_time)
                    print(u'Unknown time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.
                          format(date_string, unknown_time))

                print(u'')

            name = task_guids.get(sub_key.name, sub_key.name)

            output_writer.WriteText(u'Task: {0:s}'.format(name))
            output_writer.WriteText(u'ID: {0:s}'.format(sub_key.name))

            date_string = self._FiletimeToDatetime(
                task_cache_key.last_written_time)
            output_writer.WriteText(
                u'Last written time: {0!s}'.format(date_string))

            if last_registered_time:
                # Note this is likely either the last registered time or
                # the update time.
                date_string = self._FiletimeToDatetime(last_registered_time)
                output_writer.WriteText(
                    u'Last registered time: {0!s}'.format(date_string))

            if launch_time:
                # Note this is likely the launch time.
                date_string = self._FiletimeToDatetime(launch_time)
                output_writer.WriteText(
                    u'Launch time: {0!s}'.format(date_string))

            if unknown_time:
                date_string = self._FiletimeToDatetime(unknown_time)
                output_writer.WriteText(
                    u'Unknown time: {0!s}'.format(date_string))

            output_writer.WriteText(u'')
0
class SAMUsersWindowsRegistryPlugin(interface.WindowsRegistryPlugin):
  """Windows Registry plugin for SAM Users Account information."""

  NAME = u'windows_sam_users'
  DESCRIPTION = u'Parser for SAM Users and Names Registry keys.'

  FILTERS = frozenset([
      interface.WindowsRegistryKeyPathFilter(
          u'HKEY_LOCAL_MACHINE\\SAM\\Domains\\Account\\Users')])

  # Layout of the binary F value: last login and password reset FILETIME
  # timestamps, the account RID and the login count.
  F_VALUE_STRUCT = construct.Struct(
      u'f_struct',
      construct.Padding(8),
      construct.ULInt64(u'last_login'),
      construct.Padding(8),
      construct.ULInt64(u'password_reset'),
      construct.Padding(16),
      construct.ULInt16(u'rid'),
      construct.Padding(16),
      construct.ULInt8(u'login_count'))

  # The V value header is 11 32-bit values describing offsets and sizes of
  # the variable-length data that follows.
  V_VALUE_HEADER = construct.Struct(
      u'v_header',
      construct.Array(11, construct.ULInt32(u'values')))

  V_VALUE_HEADER_SIZE = 0xCC

  _SOURCE_APPEND = u'User Account Information'

  def _ParseFValue(self, key):
    """Parses F value and returns parsed F data construct object.

    Args:
      key: Registry key (instance of dfwinreg.WinRegistryKey).

    Returns:
      f_data: Construct parsed F value containing rid, login count,
              and timestamp information, or None when the F value is
              missing or cannot be parsed.
    """
    f_value = key.GetValueByName(u'F')
    if not f_value:
      logging.error(u'Unable to locate F Value in key.')
      return
    try:
      f_data = self.F_VALUE_STRUCT.parse(f_value.data)
    except construct.FieldError as exception:
      logging.error(
          u'Unable to extract F value data: {:s}'.format(exception))
      return
    return f_data

  def _ParseVValue(self, key):
    """Parses V value and returns name, fullname, and comments data.

    Args:
      key: Registry key (instance of dfwinreg.WinRegistryKey).

    Returns:
      name: Name data parsed with name start and length values.
      fullname: Fullname data parsed with fullname start and length values.
      comments: Comments data parsed with comments start and length values.
      Returns None when the V value is missing or cannot be parsed.
    """
    v_value = key.GetValueByName(u'V')
    if not v_value:
      logging.error(u'Unable to locate V Value in key.')
      return
    try:
      structure = self.V_VALUE_HEADER.parse(v_value.data)
    except construct.FieldError as exception:
      logging.error(
          u'Unable to extract V value header data with error: {0:s}'.format(
              exception))
      return
    # The header values are (offset, size, flags) triples; the string data
    # offsets are relative to the end of the header.
    name_offset = structure.values()[0][3] + self.V_VALUE_HEADER_SIZE
    full_name_offset = structure.values()[0][6] + self.V_VALUE_HEADER_SIZE
    comments_offset = structure.values()[0][9] + self.V_VALUE_HEADER_SIZE
    name_raw = v_value.data[
        name_offset:name_offset + structure.values()[0][4]]
    full_name_raw = v_value.data[
        full_name_offset:full_name_offset + structure.values()[0][7]]
    comments_raw = v_value.data[
        comments_offset:comments_offset + structure.values()[0][10]]
    name = binary.ReadUTF16(name_raw)
    full_name = binary.ReadUTF16(full_name_raw)
    comments = binary.ReadUTF16(comments_raw)
    return name, full_name, comments

  def GetEntries(self, parser_mediator, registry_key, **kwargs):
    """Collect data from Users and Names and produce event objects.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      registry_key: A Windows Registry key (instance of
                    dfwinreg.WinRegistryKey).
    """
    name_key = registry_key.GetSubkeyByName(u'Names')
    if not name_key:
      parser_mediator.ProduceParseError(u'Unable to locate Names key.')
      return
    values = [(v.name, v.last_written_time) for v in name_key.GetSubkeys()]

    name_dict = dict(values)

    for subkey in registry_key.GetSubkeys():
      if subkey.name == u'Names':
        continue

      parsed_v_value = self._ParseVValue(subkey)
      if not parsed_v_value:
        # BUG FIX: pass the key path rather than the key object so the
        # {0:s} format specifier receives a string.
        # NOTE(review): this aborts processing of the remaining subkeys;
        # consider continue instead -- preserved as-is.
        parser_mediator.ProduceParseError(
            u'Unable to parse SAM key: {0:s} V value.'.format(subkey.path))
        return

      username = parsed_v_value[0]
      full_name = parsed_v_value[1]
      comments = parsed_v_value[2]

      values_dict = {u'user_guid': subkey.name}

      if username:
        values_dict[u'username'] = username
      if full_name:
        values_dict[u'full_name'] = full_name
      if comments:
        values_dict[u'comments'] = comments
      if name_dict:
        account_create_time = name_dict.get(username, 0)
      else:
        account_create_time = 0

      # BUG FIX: _ParseFValue returns None when the F value is missing or
      # corrupt; the original dereferenced f_data.rid unconditionally and
      # raised AttributeError. Guard every f_data access instead.
      f_data = self._ParseFValue(subkey)
      if f_data:
        values_dict[u'account_rid'] = f_data.rid
        values_dict[u'login_count'] = f_data.login_count

      if account_create_time > 0:
        event_object = windows_events.WindowsRegistryEvent(
            account_create_time, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.ACCOUNT_CREATED,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)

      if f_data and f_data.last_login > 0:
        event_object = windows_events.WindowsRegistryEvent(
            f_data.last_login, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.LAST_LOGIN_TIME,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)

      if f_data and f_data.password_reset > 0:
        event_object = windows_events.WindowsRegistryEvent(
            f_data.password_reset, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.LAST_PASSWORD_RESET,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)
Ejemplo n.º 12
0
class UserAssistPlugin(interface.WindowsRegistryPlugin):
    """Plugin that parses an UserAssist key."""

    NAME = 'userassist'
    DESCRIPTION = 'Parser for User Assist Registry data.'

    # One Registry key path filter per known UserAssist GUID subkey.
    FILTERS = frozenset([
        UserAssistWindowsRegistryKeyPathFilter(
            'FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD'),
        UserAssistWindowsRegistryKeyPathFilter(
            'F4E57C4B-2036-45F0-A9AB-443BCFE33D9F'),
        UserAssistWindowsRegistryKeyPathFilter(
            'F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442'),
        UserAssistWindowsRegistryKeyPathFilter(
            'CEBFF5CD-ACE2-4F4F-9178-9926F41749EA'),
        UserAssistWindowsRegistryKeyPathFilter(
            'CAA59E3C-4792-41A5-9909-6A6A8D32490E'),
        UserAssistWindowsRegistryKeyPathFilter(
            'B267E3AD-A825-4A09-82B9-EEC22AA3B847'),
        UserAssistWindowsRegistryKeyPathFilter(
            'A3D53349-6E61-4557-8FC7-0028EDCEEBF6'),
        UserAssistWindowsRegistryKeyPathFilter(
            '9E04CAB2-CC14-11DF-BB8C-A2F1DED72085'),
        UserAssistWindowsRegistryKeyPathFilter(
            '75048700-EF1F-11D0-9888-006097DEACF9'),
        UserAssistWindowsRegistryKeyPathFilter(
            '5E6AB780-7743-11CF-A12B-00AA004AE837'),
        UserAssistWindowsRegistryKeyPathFilter(
            '0D6D4F41-2994-4BA0-8FEF-620E43CD2812'),
        UserAssistWindowsRegistryKeyPathFilter(
            'BCB48336-4DDD-48FF-BB0B-D3190DACB3E2')
    ])

    URLS = [
        'http://blog.didierstevens.com/programs/userassist/',
        'https://code.google.com/p/winreg-kb/wiki/UserAssistKeys',
        'http://intotheboxes.files.wordpress.com/2010/04'
        '/intotheboxes_2010_q1.pdf'
    ]

    # UserAssist format version used in Windows 2000, XP, 2003, Vista.
    # 16 bytes total: 4 unknown, 32-bit execution count, 64-bit FILETIME.
    _USERASSIST_V3_STRUCT = construct.Struct(
        'userassist_entry', construct.Padding(4),
        construct.ULInt32('number_of_executions'),
        construct.ULInt64('timestamp'))

    # UserAssist format version used in Windows 2008, 7, 8.
    # 72 bytes total: adds focus count/duration plus trailing padding.
    _USERASSIST_V5_STRUCT = construct.Struct(
        'userassist_entry', construct.Padding(4),
        construct.ULInt32('number_of_executions'),
        construct.ULInt32('application_focus_count'),
        construct.ULInt32('application_focus_duration'), construct.Padding(44),
        construct.ULInt64('timestamp'), construct.Padding(4))

    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        version_value = registry_key.GetValueByName('Version')
        count_subkey = registry_key.GetSubkeyByName('Count')

        if not version_value:
            parser_mediator.ProduceExtractionError('missing version value')
            return

        if not version_value.DataIsInteger():
            parser_mediator.ProduceExtractionError(
                'unsupported version value data type')
            return

        format_version = version_value.GetDataAsObject()
        if format_version not in (3, 5):
            parser_mediator.ProduceExtractionError(
                'unsupported format version: {0:d}'.format(format_version))
            return

        if not count_subkey:
            parser_mediator.ProduceExtractionError('missing count subkey')
            return

        userassist_entry_index = 0

        for registry_value in count_subkey.GetValues():
            # Value names are ROT-13 obfuscated.
            # NOTE(review): str.decode('rot-13') assumes a Python 2 byte
            # string name; on Python 3 this path does not work the same way --
            # confirm the supported runtime.
            try:
                value_name = registry_value.name.decode('rot-13')
            except UnicodeEncodeError as exception:
                logging.debug((
                    'Unable to decode UserAssist string: {0:s} with error: {1!s}.\n'
                    'Attempting piecewise decoding.').format(
                        registry_value.name, exception))

                # Fall back to per-character decoding, leaving characters that
                # fail (or are non-ASCII) unchanged.
                characters = []
                for char in registry_value.name:
                    if ord(char) < 128:
                        try:
                            characters.append(char.decode('rot-13'))
                        except UnicodeEncodeError:
                            characters.append(char)
                    else:
                        characters.append(char)

                value_name = ''.join(characters)

            if format_version == 5:
                # Version 5 value names embed known-folder GUIDs and may
                # contain environment variables; expand both.
                path_segments = value_name.split('\\')

                for segment_index in range(0, len(path_segments)):
                    # Remove the { } from the path segment to get the GUID.
                    guid = path_segments[segment_index][1:-1]
                    path_segments[segment_index] = known_folder_ids.PATHS.get(
                        guid, path_segments[segment_index])

                value_name = '\\'.join(path_segments)
                # Check if we might need to substitute values.
                if '%' in value_name:
                    # TODO: fix missing self._knowledge_base
                    # pylint: disable=no-member
                    environment_variables = self._knowledge_base.GetEnvironmentVariables(
                    )
                    value_name = path_helper.PathHelper.ExpandWindowsPath(
                        value_name, environment_variables)

            value_data_size = len(registry_value.data)
            if not registry_value.DataIsBinaryData():
                parser_mediator.ProduceExtractionError(
                    'unsupported value data type: {0:s}'.format(
                        registry_value.data_type_string))

            elif value_name == 'UEME_CTLSESSION':
                # Session control entry carries no per-application data.
                pass

            elif format_version == 3:
                if value_data_size != self._USERASSIST_V3_STRUCT.sizeof():
                    parser_mediator.ProduceExtractionError(
                        'unsupported value data size: {0:d}'.format(
                            value_data_size))

                else:
                    parsed_data = self._USERASSIST_V3_STRUCT.parse(
                        registry_value.data)
                    timestamp = parsed_data.get('timestamp', None)

                    number_of_executions = parsed_data.get(
                        'number_of_executions', None)
                    # Stored counts appear to be offset by 5; presumably the
                    # on-disk count starts at 5 in this format -- confirm
                    # against the format documentation in URLS.
                    if number_of_executions is not None and number_of_executions > 5:
                        number_of_executions -= 5

                    event_data = UserAssistWindowsRegistryEventData()
                    event_data.key_path = count_subkey.path
                    event_data.number_of_executions = number_of_executions
                    event_data.offset = registry_value.offset
                    event_data.value_name = value_name

                    if not timestamp:
                        date_time = dfdatetime_semantic_time.SemanticTime(
                            'Not set')
                    else:
                        date_time = dfdatetime_filetime.Filetime(
                            timestamp=timestamp)

                    # TODO: check if last written is correct.
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            elif format_version == 5:
                # NOTE(review): unlike the version 3 branch above, a size
                # mismatch here is reported but parsing proceeds anyway.
                if value_data_size != self._USERASSIST_V5_STRUCT.sizeof():
                    parser_mediator.ProduceExtractionError(
                        'unsupported value data size: {0:d}'.format(
                            value_data_size))

                parsed_data = self._USERASSIST_V5_STRUCT.parse(
                    registry_value.data)

                userassist_entry_index += 1
                timestamp = parsed_data.get('timestamp', None)

                event_data = UserAssistWindowsRegistryEventData()
                event_data.application_focus_count = parsed_data.get(
                    'application_focus_count', None)
                event_data.application_focus_duration = parsed_data.get(
                    'application_focus_duration', None)
                event_data.entry_index = userassist_entry_index
                event_data.key_path = count_subkey.path
                event_data.number_of_executions = parsed_data.get(
                    'number_of_executions', None)
                event_data.offset = count_subkey.offset
                event_data.value_name = value_name

                if not timestamp:
                    date_time = dfdatetime_semantic_time.SemanticTime(
                        'Not set')
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)

                # TODO: check if last written is correct.
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                parser_mediator.ProduceEventWithEventData(event, event_data)
Ejemplo n.º 13
0
class AppCompatCacheKeyParser(object):
    """Class that parses the Application Compatibility Cache data."""

    # Supported format types. FORMAT_TYPE_2000 and FORMAT_TYPE_VISTA are
    # never returned by CheckSignature: 2000 has no signature check here and
    # Vista shares the 2003 signature (see the TODO in CheckSignature).
    FORMAT_TYPE_2000 = 1
    FORMAT_TYPE_XP = 2
    FORMAT_TYPE_2003 = 3
    FORMAT_TYPE_VISTA = 4
    FORMAT_TYPE_7 = 5
    FORMAT_TYPE_8 = 6

    # AppCompatCache format signature used in Windows XP.
    _HEADER_SIGNATURE_XP = 0xdeadbeef

    # AppCompatCache format used in Windows XP.
    _HEADER_XP_32BIT_STRUCT = construct.Struct(
        'appcompatcache_header_xp', construct.ULInt32('signature'),
        construct.ULInt32('number_of_cached_entries'),
        construct.ULInt32('unknown1'), construct.ULInt32('unknown2'),
        construct.Padding(384))

    # XP cached entry: fixed 528-byte path buffer followed by three 64-bit
    # values.
    _CACHED_ENTRY_XP_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_xp_32bit',
        construct.Array(528, construct.Byte('path')),
        construct.ULInt64('last_modification_time'),
        construct.ULInt64('file_size'), construct.ULInt64('last_update_time'))

    # AppCompatCache format signature used in Windows 2003, Vista and 2008.
    _HEADER_SIGNATURE_2003 = 0xbadc0ffe

    # AppCompatCache format used in Windows 2003.
    _HEADER_2003_STRUCT = construct.Struct(
        'appcompatcache_header_2003', construct.ULInt32('signature'),
        construct.ULInt32('number_of_cached_entries'))

    _CACHED_ENTRY_2003_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_2003_32bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt64('file_size'))

    _CACHED_ENTRY_2003_64BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_2003_64bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('unknown1'), construct.ULInt64('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt64('file_size'))

    # AppCompatCache format used in Windows Vista and 2008.
    # NOTE(review): only cached-entry structs are defined for Vista; the
    # _HEADER_VISTA_STRUCT referenced in ParseHeader is missing from this
    # class -- confirm whether the Vista header struct was lost.
    _CACHED_ENTRY_VISTA_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_vista_32bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'))

    _CACHED_ENTRY_VISTA_64BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_vista_64bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('unknown1'), construct.ULInt64('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'))

    # AppCompatCache format signature used in Windows 7 and 2008 R2.
    _HEADER_SIGNATURE_7 = 0xbadc0fee

    # AppCompatCache format used in Windows 7 and 2008 R2.
    _HEADER_7_STRUCT = construct.Struct(
        'appcompatcache_header_7', construct.ULInt32('signature'),
        construct.ULInt32('number_of_cached_entries'), construct.Padding(120))

    _CACHED_ENTRY_7_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_7_32bit', construct.ULInt16('path_size'),
        construct.ULInt16('maximum_path_size'),
        construct.ULInt32('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'),
        construct.ULInt32('data_size'), construct.ULInt32('data_offset'))

    _CACHED_ENTRY_7_64BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_7_64bit', construct.ULInt16('path_size'),
        construct.ULInt16('maximum_path_size'), construct.ULInt32('unknown1'),
        construct.ULInt64('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'),
        construct.ULInt64('data_size'), construct.ULInt64('data_offset'))

    # AppCompatCache format used in Windows 8.0 and 8.1.
    # Note this value also equals the 128-byte header size (4 + 124 padding).
    _HEADER_SIGNATURE_8 = 0x00000080

    _HEADER_8_STRUCT = construct.Struct('appcompatcache_header_8',
                                        construct.ULInt32('signature'),
                                        construct.Padding(124))

    _CACHED_ENTRY_HEADER_8_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_header_8', construct.ULInt32('signature'),
        construct.ULInt32('unknown1'),
        construct.ULInt32('cached_entry_data_size'),
        construct.ULInt16('path_size'))

    # AppCompatCache format used in Windows 8.0.
    _CACHED_ENTRY_SIGNATURE_8_0 = '00ts'

    # AppCompatCache format used in Windows 8.1.
    _CACHED_ENTRY_SIGNATURE_8_1 = '10ts'

    def CheckSignature(self, value_data):
        """Parses the signature.

    Args:
      value_data: a binary string containing the value data.

    Returns:
      The format type if successful or None otherwise.
    """
        # The signature is the first 32-bit little-endian value.
        signature = construct.ULInt32('signature').parse(value_data)
        if signature == self._HEADER_SIGNATURE_XP:
            return self.FORMAT_TYPE_XP

        elif signature == self._HEADER_SIGNATURE_2003:
            # TODO: determine which format version is used (2003 or Vista).
            return self.FORMAT_TYPE_2003

        elif signature == self._HEADER_SIGNATURE_7:
            return self.FORMAT_TYPE_7

        elif signature == self._HEADER_SIGNATURE_8:
            # For format 8 the signature value (0x80 == 128) equals the header
            # size, so it doubles as the offset of the first cached entry
            # signature ('00ts' or '10ts').
            # NOTE(review): compares a slice of value_data against str
            # signatures; on Python 3 a bytes slice never matches -- confirm
            # the supported runtime.
            if value_data[signature:signature + 4] in [
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1
            ]:
                return self.FORMAT_TYPE_8
        # Unknown signatures fall through to an implicit None return.

    def ParseHeader(self, format_type, value_data):
        """Parses the header.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.

    Returns:
      A header object (instance of AppCompatCacheHeader).

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        # TODO: change to collections.namedtuple or use __slots__ if the overhead
        # of a regular object becomes a problem.
        header_object = AppCompatCacheHeader()

        if format_type == self.FORMAT_TYPE_XP:
            header_object.header_size = self._HEADER_XP_32BIT_STRUCT.sizeof()
            header_struct = self._HEADER_XP_32BIT_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_2003:
            header_object.header_size = self._HEADER_2003_STRUCT.sizeof()
            header_struct = self._HEADER_2003_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            # NOTE(review): _HEADER_VISTA_STRUCT is not defined on this class;
            # reaching this branch raises AttributeError -- confirm whether
            # the struct definition is missing or lives elsewhere.
            header_object.header_size = self._HEADER_VISTA_STRUCT.sizeof()
            header_struct = self._HEADER_VISTA_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_7:
            header_object.header_size = self._HEADER_7_STRUCT.sizeof()
            header_struct = self._HEADER_7_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_8:
            header_object.header_size = self._HEADER_8_STRUCT.sizeof()
            header_struct = self._HEADER_8_STRUCT.parse(value_data)

        # The format 8 header has no entry count field; it stays at the
        # header object's default.
        if format_type in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7
        ]:
            header_object.number_of_cached_entries = header_struct.get(
                'number_of_cached_entries')

        return header_object

    def DetermineCacheEntrySize(self, format_type, value_data,
                                cached_entry_offset):
        """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the first cached entry data relative to the start of
                           the value data.

    Returns:
      The cached entry size if successful or None otherwise.

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[cached_entry_offset:]
        cached_entry_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_size = self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof()

        elif format_type in [
                self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                self.FORMAT_TYPE_7
        ]:
            path_size = construct.ULInt16('path_size').parse(
                cached_entry_data[0:2])
            maximum_path_size = construct.ULInt16('maximum_path_size').parse(
                cached_entry_data[2:4])
            path_offset_32bit = construct.ULInt32('path_offset').parse(
                cached_entry_data[4:8])
            # NOTE(review): an 8-byte slice is parsed with a 32-bit parser, so
            # only the lower 4 bytes of the 64-bit path offset are read. This
            # value is only used as a zero/non-zero test below -- presumably
            # sufficient, but confirm.
            path_offset_64bit = construct.ULInt32('path_offset').parse(
                cached_entry_data[8:16])

            if maximum_path_size < path_size:
                logging.error(u'Path size value out of bounds.')
                return

            # A sane entry has a non-empty path and exactly one UTF-16
            # end-of-string character (2 bytes) of slack.
            path_end_of_string_size = maximum_path_size - path_size
            if path_size == 0 or path_end_of_string_size != 2:
                logging.error(u'Unsupported path size values.')
                return

            # Assume the entry is 64-bit if the 32-bit path offset is 0 and
            # the 64-bit path offset is set.
            if path_offset_32bit == 0 and path_offset_64bit != 0:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
                    )

            else:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof(
                    )

        elif format_type == self.FORMAT_TYPE_8:
            # Format 8 entries are variable size; this returns only the fixed
            # entry header size. ParseCachedEntry computes the full size.
            cached_entry_size = self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof()

        return cached_entry_size

    def ParseCachedEntry(self, format_type, value_data, cached_entry_offset,
                         cached_entry_size):
        """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the cached entry data relative to the start of
                           the value data.
      cached_entry_size: integer value that contains the cached entry data size.

    Returns:
      A cached entry object (instance of AppCompatCacheCachedEntry).

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[
            cached_entry_offset:cached_entry_offset + cached_entry_size]

        cached_entry_struct = None

        # Select the struct matching the format type; for 2003/Vista/7 the
        # entry size distinguishes the 32-bit from the 64-bit layout.
        if format_type == self.FORMAT_TYPE_XP:
            if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_2003:
            if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_7:
            if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
                cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_8:
            # NOTE(review): compares a slice of cached_entry_data against str
            # signatures; on Python 3 a bytes slice never matches -- confirm
            # the supported runtime.
            if cached_entry_data[0:4] not in [
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1
            ]:
                raise RuntimeError(u'Unsupported cache entry signature')

            if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
                    cached_entry_data)

                # The full format 8 entry is the 12-byte fixed header plus the
                # variable-size entry data; re-slice to cover all of it.
                cached_entry_data_size = cached_entry_struct.get(
                    'cached_entry_data_size')
                cached_entry_size = 12 + cached_entry_data_size

                cached_entry_data = value_data[
                    cached_entry_offset:cached_entry_offset +
                    cached_entry_size]

        if not cached_entry_struct:
            raise RuntimeError(u'Unsupported cache entry size: {0:d}'.format(
                cached_entry_size))

        cached_entry_object = AppCompatCacheCachedEntry()
        cached_entry_object.cached_entry_size = cached_entry_size

        path_offset = 0
        data_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            # Scan the fixed 528-byte buffer for the UTF-16 end-of-string
            # (two zero bytes at an even offset).
            # NOTE(review): xrange and ord-per-character indexing are
            # Python 2 idioms -- confirm the supported runtime.
            string_size = 0
            for string_index in xrange(0, 528, 2):
                if (ord(cached_entry_data[string_index]) == 0
                        and ord(cached_entry_data[string_index + 1]) == 0):
                    break
                string_size += 2

            cached_entry_object.path = binary.Ut16StreamCopyToString(
                cached_entry_data[0:string_size])

        elif format_type in [
                self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                self.FORMAT_TYPE_7
        ]:
            # Path is stored out-of-line; resolved at the bottom of this
            # method once path_offset and path_size are known.
            path_size = cached_entry_struct.get('path_size')
            path_offset = cached_entry_struct.get('path_offset')

        elif format_type == self.FORMAT_TYPE_8:
            path_size = cached_entry_struct.get('path_size')

            # The UTF-16 path immediately follows the 14-byte entry header.
            cached_entry_data_offset = 14 + path_size
            cached_entry_object.path = binary.Ut16StreamCopyToString(
                cached_entry_data[14:cached_entry_data_offset])

            remaining_data = cached_entry_data[cached_entry_data_offset:]

            cached_entry_object.insertion_flags = construct.ULInt32(
                'insertion_flags').parse(remaining_data[0:4])
            cached_entry_object.shim_flags = construct.ULInt32(
                'shim_flags').parse(remaining_data[4:8])

            # 8.0 and 8.1 entries differ in the size of the flag block.
            if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
                cached_entry_data_offset += 8

            elif cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
                cached_entry_data_offset += 10

            remaining_data = cached_entry_data[cached_entry_data_offset:]

        if format_type in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7
        ]:
            cached_entry_object.last_modification_time = cached_entry_struct.get(
                'last_modification_time')

        elif format_type == self.FORMAT_TYPE_8:
            cached_entry_object.last_modification_time = construct.ULInt64(
                'last_modification_time').parse(remaining_data[0:8])

        if format_type in [self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003]:
            cached_entry_object.file_size = cached_entry_struct.get(
                'file_size')

        elif format_type in [self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
            cached_entry_object.insertion_flags = cached_entry_struct.get(
                'insertion_flags')
            cached_entry_object.shim_flags = cached_entry_struct.get(
                'shim_flags')

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_object.last_update_time = cached_entry_struct.get(
                'last_update_time')

        if format_type == self.FORMAT_TYPE_7:
            data_offset = cached_entry_struct.get('data_offset')
            data_size = cached_entry_struct.get('data_size')

        elif format_type == self.FORMAT_TYPE_8:
            data_offset = cached_entry_offset + cached_entry_data_offset + 12
            data_size = construct.ULInt32('data_size').parse(
                remaining_data[8:12])

        # Out-of-line path (2003/Vista/7); offsets are relative to the start
        # of value_data.
        if path_offset > 0 and path_size > 0:
            path_size += path_offset

            cached_entry_object.path = binary.Ut16StreamCopyToString(
                value_data[path_offset:path_size])

        if data_size > 0:
            data_size += data_offset

            cached_entry_object.data = value_data[data_offset:data_size]

        return cached_entry_object
Ejemplo n.º 14
0
def FILETIME(name):
    """Builds a named little-endian 64-bit FILETIME construct field.

    Args:
        name: name to assign to the construct field.

    Returns:
        A FileTimeAdapter wrapping an unsigned 64-bit little-endian integer.
    """
    raw_field = construct.ULInt64(name)
    return FileTimeAdapter(raw_field)
Ejemplo n.º 15
0
class UserAssistPlugin(interface.WindowsRegistryPlugin):
  """Plugin that parses an UserAssist key."""

  NAME = u'userassist'
  DESCRIPTION = u'Parser for User Assist Registry data.'

  # One Registry key path filter per known UserAssist GUID subkey.
  FILTERS = frozenset([
      UserAssistWindowsRegistryKeyPathFilter(
          u'FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'F4E57C4B-2036-45F0-A9AB-443BCFE33D9F'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'CEBFF5CD-ACE2-4F4F-9178-9926F41749EA'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'CAA59E3C-4792-41A5-9909-6A6A8D32490E'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'B267E3AD-A825-4A09-82B9-EEC22AA3B847'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'A3D53349-6E61-4557-8FC7-0028EDCEEBF6'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'9E04CAB2-CC14-11DF-BB8C-A2F1DED72085'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'75048700-EF1F-11D0-9888-006097DEACF9'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'5E6AB780-7743-11CF-A12B-00AA004AE837'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'0D6D4F41-2994-4BA0-8FEF-620E43CD2812'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'BCB48336-4DDD-48FF-BB0B-D3190DACB3E2')])

  URLS = [
      u'http://blog.didierstevens.com/programs/userassist/',
      u'https://code.google.com/p/winreg-kb/wiki/UserAssistKeys',
      u'http://intotheboxes.files.wordpress.com/2010/04'
      u'/intotheboxes_2010_q1.pdf']

  # UserAssist format version used in Windows 2000, XP, 2003, Vista.
  # 16 bytes total: 4 unknown, 32-bit run count, 64-bit FILETIME.
  _USERASSIST_V3_STRUCT = construct.Struct(
      u'userassist_entry',
      construct.Padding(4),
      construct.ULInt32(u'count'),
      construct.ULInt64(u'timestamp'))

  # UserAssist format version used in Windows 2008, 7, 8.
  # 72 bytes total: adds focus count/duration plus trailing padding.
  _USERASSIST_V5_STRUCT = construct.Struct(
      u'userassist_entry',
      construct.Padding(4),
      construct.ULInt32(u'count'),
      construct.ULInt32(u'app_focus_count'),
      construct.ULInt32(u'focus_duration'),
      construct.Padding(44),
      construct.ULInt64(u'timestamp'),
      construct.Padding(4))

  def GetEntries(self, parser_mediator, registry_key, **kwargs):
    """Parses a UserAssist Registry key.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      registry_key: A Windows Registry key (instance of
                    dfwinreg.WinRegistryKey).
    """
    version_value = registry_key.GetValueByName(u'Version')
    count_subkey = registry_key.GetSubkeyByName(u'Count')

    if not version_value:
      parser_mediator.ProduceExtractionError(u'Missing version value')
      return

    if not version_value.DataIsInteger():
      parser_mediator.ProduceExtractionError(
          u'Unsupported version value data type')
      return

    format_version = version_value.GetDataAsObject()
    if format_version not in (3, 5):
      parser_mediator.ProduceExtractionError(
          u'Unsupported format version: {0:d}'.format(format_version))
      return

    if not count_subkey:
      parser_mediator.ProduceExtractionError(u'Missing count subkey')
      return

    userassist_entry_index = 0

    for registry_value in count_subkey.GetValues():
      # Value names are ROT-13 obfuscated.
      # NOTE(review): str.decode(u'rot-13') assumes a Python 2 byte string
      # name -- confirm the supported runtime.
      try:
        value_name = registry_value.name.decode(u'rot-13')
      except UnicodeEncodeError as exception:
        logging.debug((
            u'Unable to decode UserAssist string: {0:s} with error: {1:s}.\n'
            u'Attempting piecewise decoding.').format(
                registry_value.name, exception))

        # Fall back to per-character decoding, leaving characters that fail
        # (or are non-ASCII) unchanged.
        characters = []
        for char in registry_value.name:
          if ord(char) < 128:
            try:
              characters.append(char.decode(u'rot-13'))
            except UnicodeEncodeError:
              characters.append(char)
          else:
            characters.append(char)

        value_name = u''.join(characters)

      if format_version == 5:
        # Version 5 value names embed known-folder GUIDs and may contain
        # environment variables; expand both.
        path_segments = value_name.split(u'\\')

        for segment_index in range(0, len(path_segments)):
          # Remove the { } from the path segment to get the GUID.
          guid = path_segments[segment_index][1:-1]
          path_segments[segment_index] = known_folder_ids.PATHS.get(
              guid, path_segments[segment_index])

        value_name = u'\\'.join(path_segments)
        # Check if we might need to substitute values.
        if u'%' in value_name:
          path_attributes = parser_mediator.knowledge_base.GetPathAttributes()
          value_name = environ_expand.ExpandWindowsEnvironmentVariables(
              value_name, path_attributes)

      value_data_size = len(registry_value.data)
      if not registry_value.DataIsBinaryData():
        parser_mediator.ProduceExtractionError(
            u'Unsupported value data type: {0:s}'.format(
                registry_value.data_type_string))

      elif value_name == u'UEME_CTLSESSION':
        # Session control entry carries no per-application data.
        pass

      elif format_version == 3:
        if value_data_size != self._USERASSIST_V3_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              u'Unsupported value data size: {0:d}'.format(value_data_size))

        else:
          parsed_data = self._USERASSIST_V3_STRUCT.parse(registry_value.data)
          filetime = parsed_data.get(u'timestamp', 0)
          count = parsed_data.get(u'count', 0)

          # Stored counts appear to be offset by 5; presumably the on-disk
          # count starts at 5 in this format -- confirm against the format
          # documentation in URLS.
          if count > 5:
            count -= 5

          values_dict = {}
          values_dict[value_name] = u'[Count: {0:d}]'.format(count)
          event_object = UserAssistWindowsRegistryEvent(
              filetime, count_subkey.path, registry_value.offset, values_dict)
          parser_mediator.ProduceEvent(event_object)

      elif format_version == 5:
        # NOTE(review): unlike the version 3 branch above, a size mismatch
        # here is reported but parsing proceeds anyway.
        if value_data_size != self._USERASSIST_V5_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              u'Unsupported value data size: {0:d}'.format(value_data_size))

        parsed_data = self._USERASSIST_V5_STRUCT.parse(registry_value.data)

        userassist_entry_index += 1
        count = parsed_data.get(u'count', None)
        app_focus_count = parsed_data.get(u'app_focus_count', None)
        focus_duration = parsed_data.get(u'focus_duration', None)
        filetime = parsed_data.get(u'timestamp', 0)

        values_dict = {}
        values_dict[value_name] = (
            u'[UserAssist entry: {0:d}, Count: {1:d}, '
            u'Application focus count: {2:d}, Focus duration: {3:d}]').format(
                userassist_entry_index, count, app_focus_count,
                focus_duration)

        event_object = UserAssistWindowsRegistryEvent(
            filetime, count_subkey.path, count_subkey.offset, values_dict)
        parser_mediator.ProduceEvent(event_object)
Ejemplo n.º 16
0
class TaskCachePlugin(interface.WindowsRegistryPlugin):
    """Plugin that parses a Task Cache key.

    The Task Cache key stores information about tasks known to the Task
    Scheduler: Tree sub keys map task names to GUIDs and Tasks sub keys
    contain per-task data, including a DynamicInfo value with timestamps.
    """

    NAME = u'windows_task_cache'
    DESCRIPTION = u'Parser for Task Scheduler cache Registry data.'

    FILTERS = frozenset([
        interface.WindowsRegistryKeyPathFilter(
            u'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\'
            u'CurrentVersion\\Schedule\\TaskCache')
    ])

    URLS = [(u'https://github.com/libyal/winreg-kb/blob/master/documentation/'
             u'Task%20Scheduler%20Keys.asciidoc')]

    # DynamicInfo value data layout without the trailing unknown FILETIME.
    _DYNAMIC_INFO_STRUCT = construct.Struct(
        u'dynamic_info_record', construct.ULInt32(u'unknown1'),
        construct.ULInt64(u'last_registered_time'),
        construct.ULInt64(u'launch_time'), construct.ULInt32(u'unknown2'),
        construct.ULInt32(u'unknown3'))

    _DYNAMIC_INFO_STRUCT_SIZE = _DYNAMIC_INFO_STRUCT.sizeof()

    # DynamicInfo value data layout with an additional trailing FILETIME
    # (unknown_time) member.
    _DYNAMIC_INFO2_STRUCT = construct.Struct(
        u'dynamic_info2_record', construct.ULInt32(u'unknown1'),
        construct.ULInt64(u'last_registered_time'),
        construct.ULInt64(u'launch_time'), construct.ULInt32(u'unknown2'),
        construct.ULInt32(u'unknown3'), construct.ULInt64(u'unknown_time'))

    _DYNAMIC_INFO2_STRUCT_SIZE = _DYNAMIC_INFO2_STRUCT.sizeof()

    def _GetIdValue(self, registry_key):
        """Retrieves the Id value from Task Cache Tree key.

    Args:
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.

    Yields:
      tuple: contains:

        dfwinreg.WinRegistryKey: Windows Registry key.
        dfwinreg.WinRegistryValue: Windows Registry value.
    """
        id_value = registry_key.GetValueByName(u'Id')
        if id_value:
            yield registry_key, id_value

        # Recurse into sub keys since tasks can be nested in folders.
        for sub_key in registry_key.GetSubkeys():
            for value_key, id_value in self._GetIdValue(sub_key):
                yield value_key, id_value

    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        # Report the unsupported-size error only once to avoid flooding
        # the error stream on malformed hives.
        dynamic_info_size_error_reported = False

        tasks_key = registry_key.GetSubkeyByName(u'Tasks')
        tree_key = registry_key.GetSubkeyByName(u'Tree')

        if not tasks_key or not tree_key:
            parser_mediator.ProduceExtractionError(
                u'Task Cache is missing a Tasks or Tree sub key.')
            return

        # Map task GUID strings to their human readable task names.
        task_guids = {}
        for sub_key in tree_key.GetSubkeys():
            for value_key, id_value in self._GetIdValue(sub_key):
                # TODO: improve this check to a regex.
                # The GUID is in the form {%GUID%} and stored an UTF-16 little-endian
                # string and should be 78 bytes in size.
                id_value_data_size = len(id_value.data)
                if id_value_data_size != 78:
                    parser_mediator.ProduceExtractionError(
                        u'unsupported Id value data size: {0:d}.'.format(
                            id_value_data_size))
                    continue

                guid_string = id_value.GetDataAsObject()
                task_guids[guid_string] = value_key.name

        for sub_key in tasks_key.GetSubkeys():
            dynamic_info_value = sub_key.GetValueByName(u'DynamicInfo')
            if not dynamic_info_value:
                continue

            # Select the struct variant that matches the value data size.
            dynamic_info_value_data_size = len(dynamic_info_value.data)
            if dynamic_info_value_data_size == self._DYNAMIC_INFO_STRUCT_SIZE:
                dynamic_info_struct = self._DYNAMIC_INFO_STRUCT.parse(
                    dynamic_info_value.data)

            elif dynamic_info_value_data_size == self._DYNAMIC_INFO2_STRUCT_SIZE:
                # Bug fix: parse with the DynamicInfo2 struct so the trailing
                # unknown_time member is decoded; previously the shorter
                # struct was used here and unknown_time was never set.
                dynamic_info_struct = self._DYNAMIC_INFO2_STRUCT.parse(
                    dynamic_info_value.data)

            else:
                if not dynamic_info_size_error_reported:
                    parser_mediator.ProduceExtractionError(
                        u'unsupported DynamicInfo value data size: {0:d}.'.
                        format(dynamic_info_value_data_size))
                    dynamic_info_size_error_reported = True
                continue

            name = task_guids.get(sub_key.name, sub_key.name)

            values_dict = {}
            values_dict[u'Task: {0:s}'.format(name)] = u'[ID: {0:s}]'.format(
                sub_key.name)

            event_data = windows_events.WindowsRegistryEventData()
            event_data.key_path = registry_key.path
            event_data.offset = registry_key.offset
            event_data.regvalue = values_dict

            event = time_events.DateTimeValuesEvent(
                registry_key.last_written_time,
                eventdata.EventTimestamp.WRITTEN_TIME)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            event_data = TaskCacheEventData()
            event_data.task_name = name
            event_data.task_identifier = sub_key.name

            last_registered_time = dynamic_info_struct.get(
                u'last_registered_time')
            if last_registered_time:
                # Note this is likely either the last registered time or
                # the update time.
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=last_registered_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, u'Last registered time')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            launch_time = dynamic_info_struct.get(u'launch_time')
            if launch_time:
                # Note this is likely the launch time.
                date_time = dfdatetime_filetime.Filetime(timestamp=launch_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, u'Launch time')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            unknown_time = dynamic_info_struct.get(u'unknown_time')
            if unknown_time:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=unknown_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, eventdata.EventTimestamp.UNKNOWN)
                parser_mediator.ProduceEventWithEventData(event, event_data)
Ejemplo n.º 17
0
class WinRecyclerInfo2Parser(interface.FileObjectParser):
    """Parses the Windows Recycler INFO2 file."""

    NAME = u'recycle_bin_info2'
    DESCRIPTION = u'Parser for Windows Recycler INFO2 files.'

    _FILE_HEADER_STRUCT = construct.Struct(u'file_header',
                                           construct.ULInt32(u'unknown1'),
                                           construct.ULInt32(u'unknown2'),
                                           construct.ULInt32(u'unknown3'),
                                           construct.ULInt32(u'record_size'),
                                           construct.ULInt32(u'unknown4'))

    _RECYCLER_RECORD_STRUCT = construct.Struct(
        u'recycler_record', construct.ULInt32(u'index'),
        construct.ULInt32(u'drive_number'),
        construct.ULInt64(u'deletion_time'), construct.ULInt32(u'file_size'))

    _ASCII_STRING = construct.CString(u'string')

    # Offsets within a record of the fixed-size record fields and of the
    # optional Unicode filename.
    _RECORD_INDEX_OFFSET = 0x104
    _UNICODE_FILENAME_OFFSET = 0x118

    def _ParseRecord(self, parser_mediator, file_object, record_offset,
                     record_size):
        """Parses an INFO-2 record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
      record_offset (int): record offset.
      record_size (int): record size.
    """
        record_data = file_object.read(record_size)

        # Bug fix: initialize before the try so the name is defined even if
        # parsing fails; the original code also reset ascii_filename to None
        # after parsing, which discarded the parsed value.
        ascii_filename = None
        try:
            ascii_filename = self._ASCII_STRING.parse(record_data)

        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError((
                u'unable to parse recycler ASCII filename at offset: 0x{0:08x} '
                u'with error: {1:s}').format(record_offset, exception))

        try:
            recycler_record_struct = self._RECYCLER_RECORD_STRUCT.parse(
                record_data[self._RECORD_INDEX_OFFSET:])
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                (u'unable to parse recycler index record at offset: 0x{0:08x} '
                 u'with error: {1:s}').format(
                     record_offset + self._RECORD_INDEX_OFFSET, exception))
            # Bug fix: without the record struct there is nothing to build an
            # event from; the original code fell through and raised NameError.
            return

        unicode_filename = None
        if record_size == 800:
            unicode_filename = binary.ReadUTF16(
                record_data[self._UNICODE_FILENAME_OFFSET:])

        if ascii_filename and parser_mediator.codepage:
            try:
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage)
            except UnicodeDecodeError:
                # Fall back to a lossy decode rather than dropping the record.
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage, errors=u'replace')

        elif ascii_filename:
            ascii_filename = repr(ascii_filename)

        if recycler_record_struct.deletion_time == 0:
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=recycler_record_struct.deletion_time)

        event_data = WinRecycleBinEventData()
        event_data.drive_number = recycler_record_struct.drive_number
        event_data.original_filename = unicode_filename or ascii_filename
        event_data.file_size = recycler_record_struct.file_size
        event_data.offset = record_offset
        event_data.record_index = recycler_record_struct.index
        event_data.short_filename = ascii_filename

        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.DELETED_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows Recycler INFO2 file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
        # Since this header value is really generic it is hard not to use filename
        # as an indicator too.

        # TODO: Rethink this and potentially make a better test.
        filename = parser_mediator.GetFilename()
        if not filename.startswith(u'INFO2'):
            return

        try:
            file_header_struct = self._FILE_HEADER_STRUCT.parse_stream(
                file_object)
        except (construct.FieldError, IOError) as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to parse file header with error: {0:s}'.format(
                    exception))
            return

        if file_header_struct.unknown1 != 5:
            # Typo fixed in the error message ("unsupport" -> "unsupported").
            parser_mediator.ProduceExtractionError(
                u'unsupported format signature.')
            return

        record_size = file_header_struct.record_size
        if record_size not in (280, 800):
            parser_mediator.ProduceExtractionError(
                u'unsupported record size: {0:d}'.format(record_size))
            return

        record_offset = self._FILE_HEADER_STRUCT.sizeof()
        file_size = file_object.get_size()

        while record_offset < file_size:
            self._ParseRecord(parser_mediator, file_object, record_offset,
                              record_size)

            record_offset += record_size
Ejemplo n.º 18
0
import ccl_bplist
import construct
import plistlib
"""
An old version of yk_bmdata.py that makes use of construct library.
Limitation: Number of ToCs it can parse
"""

__author__ = 'yukai'

# Short aliases for construct combinators, to reduce code width below.
_ARRAY = construct.Array
_POINTER = construct.Pointer
_REPEAT = construct.RepeatUntil
# Pre-built little-endian unsigned integer fields (32-bit and 64-bit);
# note these are field instances, not classes.
_INT = construct.ULInt32('integer')
_INT64 = construct.ULInt64('integer64')
_BOOKMARK_DATA = construct.Struct(
    'bookmark_data',
    construct.String('magic', 4),
    construct.ULInt32('length'),
    construct.UBInt32('version'),
    construct.ULInt32('offset'),  # offset to "FirstToC Offset"
    _POINTER(
        lambda ctx: ctx.offset,
        construct.Struct(
            'ftoc_offset',
            construct.Anchor('abs_offset'),
            construct.ULInt32('offset'),
            _POINTER(
                lambda ctx: ctx.abs_offset + ctx.offset,
Ejemplo n.º 19
0
class WinRecycleBinParser(interface.FileObjectParser):
    """Parses the Windows $Recycle.Bin $I files."""

    NAME = u'recycle_bin'
    DESCRIPTION = u'Parser for Windows $Recycle.Bin $I files.'

    _FILE_HEADER_STRUCT = construct.Struct(
        u'file_header',
        construct.ULInt64(u'format_version'),
        construct.ULInt64(u'file_size'),
        construct.ULInt64(u'deletion_time'))

    _FILENAME_V2_STRUCT = construct.Struct(
        u'filename_v2',
        construct.ULInt32(u'number_of_characters'),
        construct.String(u'string', lambda ctx: ctx.number_of_characters * 2))

    def _ReadFilename(self, parser_mediator, file_object, format_version):
        """Reads the filename.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (FileIO): file-like object.
      format_version (int): format version.

    Returns:
      str: filename
    """
        # Format version 1 stores the filename as a plain UTF-16 stream.
        if format_version == 1:
            return binary.ReadUTF16Stream(file_object)

        # Format version 2 prefixes the UTF-16 string with a character count.
        try:
            structure = self._FILENAME_V2_STRUCT.parse_stream(file_object)

        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to parse filename with error: {0:s}'.format(
                    exception))
            return

        return binary.ReadUTF16(structure.string)

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows RecycleBin $Ixx file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
        # We may have to rely on filenames since this header is very generic.

        # TODO: Rethink this and potentially make a better test.
        if not parser_mediator.GetFilename().startswith(u'$I'):
            return

        try:
            file_header = self._FILE_HEADER_STRUCT.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to parse file header with error: {0:s}'.format(
                    exception))
            return

        format_version = file_header.format_version
        if format_version not in (1, 2):
            parser_mediator.ProduceExtractionError(
                u'unsupported format version: {0:d}.'.format(format_version))
            return

        # A deletion time of 0 means the timestamp was never set.
        if file_header.deletion_time:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=file_header.deletion_time)
        else:
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')

        event_data = WinRecycleBinEventData()
        event_data.file_size = file_header.file_size
        event_data.original_filename = self._ReadFilename(
            parser_mediator, file_object, format_version)

        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.DELETED_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Ejemplo n.º 20
0
class RestorePointLogParser(interface.BaseParser):
    """A parser for Windows Restore Point (rp.log) files."""

    NAME = u'rplog'
    DESCRIPTION = u'Parser for Windows Restore Point (rp.log) files.'

    _FILE_HEADER_STRUCT = construct.Struct(
        u'file_header',
        construct.ULInt32(u'event_type'),
        construct.ULInt32(u'restore_point_type'),
        construct.ULInt64(u'sequence_number'),
        # Description is a UTF-16 little-endian string read two bytes at
        # a time until the end-of-string character is reached.
        construct.RepeatUntil(
            lambda obj, ctx: obj == b'\x00\x00',
            construct.Field(u'description', 2)))

    _FILE_FOOTER_STRUCT = construct.Struct(
        u'file_footer', construct.ULInt64(u'creation_time'))

    def _ParseFileHeader(self, file_object):
        """Parses the file header.

    Args:
      file_object: A file-like object to read data from.

    Returns:
      The file header construct object.
    """
        try:
            header = self._FILE_HEADER_STRUCT.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse file header with error: {0:s}'.format(
                    exception))

        if not header:
            raise errors.UnableToParseFile(u'Unable to read file header')

        return header

    def _ParseFileFooter(self, file_object):
        """Parses the file footer.

    Args:
      file_object: A file-like object to read data from.

    Returns:
      The file footer construct object.
    """
        try:
            footer = self._FILE_FOOTER_STRUCT.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse file footer with error: {0:s}'.format(
                    exception))

        if not footer:
            raise errors.UnableToParseFile(u'Unable to read file footer')

        return footer

    def Parse(self, parser_mediator, **kwargs):
        """Parses a single file.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
    """
        # TODO: for now cheat with detecting this file type.
        file_entry = parser_mediator.GetFileEntry()
        if file_entry.name.lower() != u'rp.log':
            raise errors.UnableToParseFile(u'File not named: rp.log')

        file_object = parser_mediator.GetFileObject(offset=None)
        try:
            self.ParseFileObject(parser_mediator, file_object, **kwargs)
        finally:
            file_object.close()

    def ParseFileObject(self, parser_mediator, file_object, **unused_kwargs):
        """Parses a Windows Prefetch file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        file_object.seek(0, os.SEEK_SET)
        file_header = self._ParseFileHeader(file_object)

        try:
            # The struct includes the end-of-string character that we need
            # to strip off.
            description_data = b''.join(file_header.description)
            description = description_data.decode(u'utf16')[:-1]
        except UnicodeDecodeError as exception:
            description = u''
            logging.warning(
                (u'[{0:s}] Unable to decode description UTF-16 stream with '
                 u'error: {1:s}').format(self.NAME, exception))

        # The creation time lives in the 8-byte footer at the end of the file.
        file_object.seek(-8, os.SEEK_END)
        file_footer = self._ParseFileFooter(file_object)

        timestamp = file_footer.get(u'creation_time', 0)
        if timestamp:
            event_object = RestorePointInfoEvent(
                timestamp, file_header.event_type,
                file_header.restore_point_type, file_header.sequence_number,
                description)
            parser_mediator.ProduceEvent(event_object)
        else:
            parser_mediator.ProduceParseError(u'Timestamp not set.')
Ejemplo n.º 21
0
class EseDbPlugin(plugins.BasePlugin):
    """The ESE database plugin interface."""

    NAME = u'esedb'

    BINARY_DATA_COLUMN_TYPES = frozenset([
        pyesedb.column_types.BINARY_DATA,
        pyesedb.column_types.LARGE_BINARY_DATA
    ])

    FLOATING_POINT_COLUMN_TYPES = frozenset(
        [pyesedb.column_types.FLOAT_32BIT, pyesedb.column_types.DOUBLE_64BIT])

    INTEGER_COLUMN_TYPES = frozenset([
        pyesedb.column_types.CURRENCY, pyesedb.column_types.DATE_TIME,
        pyesedb.column_types.INTEGER_8BIT_UNSIGNED,
        pyesedb.column_types.INTEGER_16BIT_SIGNED,
        pyesedb.column_types.INTEGER_16BIT_UNSIGNED,
        pyesedb.column_types.INTEGER_32BIT_SIGNED,
        pyesedb.column_types.INTEGER_32BIT_UNSIGNED,
        pyesedb.column_types.INTEGER_64BIT_SIGNED
    ])

    STRING_COLUMN_TYPES = frozenset(
        [pyesedb.column_types.TEXT, pyesedb.column_types.LARGE_TEXT])

    _UINT64_BIG_ENDIAN = construct.UBInt64(u'value')
    _UINT64_LITTLE_ENDIAN = construct.ULInt64(u'value')

    # Dictionary containing a callback method per table name.
    # E.g. 'SystemIndex_0A': 'ParseSystemIndex_0A'
    REQUIRED_TABLES = {}
    OPTIONAL_TABLES = {}

    def __init__(self):
        """Initializes the ESE database plugin."""
        super(EseDbPlugin, self).__init__()
        self._required_tables = frozenset(self.REQUIRED_TABLES.keys())
        self._tables = {}
        self._tables.update(self.REQUIRED_TABLES)
        self._tables.update(self.OPTIONAL_TABLES)

    def _ConvertValueBinaryDataToStringAscii(self, value):
        """Converts a binary data value into a string.

    Args:
      value: The binary data value containing an ASCII string or None.

    Returns:
      A string or None if value is None.
    """
        if value:
            return value.decode(u'ascii')

    def _ConvertValueBinaryDataToStringBase16(self, value):
        """Converts a binary data value into a base-16 (hexadecimal) string.

    Args:
      value: The binary data value or None.

    Returns:
      A string or None if value is None.
    """
        if value:
            return value.encode(u'hex')

    def _ConvertValueBinaryDataToUBInt64(self, value):
        """Converts a binary data value into an integer.

    Args:
      value: The binary data value containing an unsigned 64-bit big-endian
             integer.

    Returns:
      An integer or None if value is None.
    """
        if value:
            return self._UINT64_BIG_ENDIAN.parse(value)

    def _ConvertValueBinaryDataToULInt64(self, value):
        """Converts a binary data value into an integer.

    Args:
      value: The binary data value containing an unsigned 64-bit little-endian
             integer.

    Returns:
      An integer or None if value is None.
    """
        if value:
            return self._UINT64_LITTLE_ENDIAN.parse(value)

    def _GetRecordValue(self, record, value_entry):
        """Retrieves a specific value from the record.

    Args:
      record: The ESE record object (instance of pyesedb.record).
      value_entry: The value entry.

    Returns:
      An object containing the value.

    Raises:
      ValueError: if the value is not supported.
    """
        column_type = record.get_column_type(value_entry)
        long_value = None

        # Long values are stored out-of-row and need a separate accessor.
        if record.is_long_value(value_entry):
            long_value = record.get_value_data_as_long_value(value_entry)

        if record.is_multi_value(value_entry):
            # TODO: implement
            raise ValueError(u'Multi value support not implemented yet.')

        if column_type == pyesedb.column_types.NULL:
            return

        elif column_type == pyesedb.column_types.BOOLEAN:
            # TODO: implement
            raise ValueError(u'Boolean value support not implemented yet.')

        elif column_type in self.INTEGER_COLUMN_TYPES:
            if long_value:
                raise ValueError(u'Long integer value not supported.')
            return record.get_value_data_as_integer(value_entry)

        elif column_type in self.FLOATING_POINT_COLUMN_TYPES:
            if long_value:
                raise ValueError(u'Long floating point value not supported.')
            return record.get_value_data_as_floating_point(value_entry)

        elif column_type in self.STRING_COLUMN_TYPES:
            if long_value:
                return long_value.get_data_as_string()
            return record.get_value_data_as_string(value_entry)

        elif column_type == pyesedb.column_types.GUID:
            # TODO: implement
            raise ValueError(u'GUID value support not implemented yet.')

        # Fall back to the raw value data for binary and unknown types.
        if long_value:
            return long_value.get_data()
        return record.get_value_data(value_entry)

    def _GetRecordValues(self, table_name, record, value_mappings=None):
        """Retrieves the values from the record.

    Args:
      table_name: The name of the table.
      record: The ESE record object (instance of pyesedb.record).
      value_mappings: Optional dict of value mappings, which map the column
                      name to a callback method. The default is None.

    Returns:
      An dict containing the values.
    """
        record_values = {}

        for value_entry in range(0, record.number_of_values):
            column_name = record.get_column_name(value_entry)
            if column_name in record_values:
                logging.warning(
                    u'[{0:s}] duplicate column: {1:s} in table: {2:s}'.format(
                        self.NAME, column_name, table_name))
                continue

            value_callback = None
            if value_mappings and column_name in value_mappings:
                value_callback_method = value_mappings.get(column_name)
                if value_callback_method:
                    value_callback = getattr(self, value_callback_method, None)
                    if value_callback is None:
                        logging.warning((
                            u'[{0:s}] missing value callback method: {1:s} for column: '
                            u'{2:s} in table: {3:s}').format(
                                self.NAME, value_callback_method, column_name,
                                table_name))

            try:
                value = self._GetRecordValue(record, value_entry)
            except ValueError as exception:
                logging.warning(exception)
                # Bug fix: skip this column on error. Previously execution
                # fell through, so value was either undefined (NameError on
                # the first column) or stale from the previous column.
                continue

            if value_callback:
                value = value_callback(value)

            record_values[column_name] = value

        return record_values

    def _GetTableNames(self, database):
        """Retrieves the table names in a database.

    Args:
      database: The ESE database object (instance of pyesedb.file).

    Returns:
      A list of the table names.
    """
        table_names = []
        for esedb_table in database.tables:
            table_names.append(esedb_table.name)

        return table_names

    def GetEntries(self, parser_mediator, database=None, cache=None, **kwargs):
        """Extracts event objects from the database.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      database: Optional ESE database object (instance of pyesedb.file).
                The default is None.
      cache: Optional cache object (instance of EseDbCache). The default is
             None.

    Raises:
      ValueError: If the database attribute is not valid.
    """
        if database is None:
            raise ValueError(u'Invalid database.')

        for table_name, callback_method in self._tables.iteritems():
            if not callback_method:
                # Table names without a callback method are allowed to improve
                # the detection of a database based on its table names.
                continue

            callback = getattr(self, callback_method, None)
            if callback is None:
                logging.warning(
                    u'[{0:s}] missing callback method: {1:s} for table: {2:s}'.
                    format(self.NAME, callback_method, table_name))
                continue

            esedb_table = database.get_table_by_name(table_name)
            if not esedb_table:
                logging.warning(u'[{0:s}] missing table: {1:s}'.format(
                    self.NAME, table_name))
                continue

            # The database is passed in case the database contains table names
            # that are assigned dynamically and cannot be defined by
            # the table name-callback mechanism.
            callback(parser_mediator,
                     database=database,
                     table=esedb_table,
                     cache=cache,
                     **kwargs)

    def Process(self, parser_mediator, database=None, cache=None, **kwargs):
        """Determines if this is the appropriate plugin for the database.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      database: Optional ESE database object (instance of pyesedb.file).
                The default is None.
      cache: Optional cache object (instance of EseDbCache). The default is
             None.

    Raises:
      errors.WrongPlugin: If the database does not contain all the tables
                          defined in the required_tables set.
      ValueError: If the database attribute is not valid.
    """
        if database is None:
            raise ValueError(u'Invalid database.')

        table_names = frozenset(self._GetTableNames(database))
        if self._required_tables.difference(table_names):
            raise errors.WrongPlugin(
                u'[{0:s}] required tables not found.'.format(self.NAME))

        # This will raise if unhandled keyword arguments are passed.
        super(EseDbPlugin, self).Process(parser_mediator)

        self.GetEntries(parser_mediator,
                        database=database,
                        cache=cache,
                        **kwargs)
Ejemplo n.º 22
0
class ESEDBPlugin(plugins.BasePlugin):
  """The ESE database plugin interface."""

  NAME = 'esedb'

  BINARY_DATA_COLUMN_TYPES = frozenset([
      pyesedb.column_types.BINARY_DATA,
      pyesedb.column_types.LARGE_BINARY_DATA])

  FLOATING_POINT_COLUMN_TYPES = frozenset([
      pyesedb.column_types.FLOAT_32BIT,
      pyesedb.column_types.DOUBLE_64BIT])

  INTEGER_COLUMN_TYPES = frozenset([
      pyesedb.column_types.CURRENCY,
      pyesedb.column_types.DATE_TIME,
      pyesedb.column_types.INTEGER_8BIT_UNSIGNED,
      pyesedb.column_types.INTEGER_16BIT_SIGNED,
      pyesedb.column_types.INTEGER_16BIT_UNSIGNED,
      pyesedb.column_types.INTEGER_32BIT_SIGNED,
      pyesedb.column_types.INTEGER_32BIT_UNSIGNED,
      pyesedb.column_types.INTEGER_64BIT_SIGNED])

  STRING_COLUMN_TYPES = frozenset([
      pyesedb.column_types.TEXT,
      pyesedb.column_types.LARGE_TEXT])

  _UINT64_BIG_ENDIAN = construct.UBInt64('value')
  _UINT64_LITTLE_ENDIAN = construct.ULInt64('value')

  # Dictionary containing a callback method per table name.
  # E.g. 'SystemIndex_0A': 'ParseSystemIndex_0A'
  REQUIRED_TABLES = {}
  OPTIONAL_TABLES = {}

  def __init__(self):
    """Initializes the ESE database plugin."""
    super(ESEDBPlugin, self).__init__()
    self._tables = {}
    self._tables.update(self.REQUIRED_TABLES)
    self._tables.update(self.OPTIONAL_TABLES)

  @property
  def required_tables(self):
    """set[str]: required table names."""
    return frozenset(self.REQUIRED_TABLES.keys())

  def _ConvertValueBinaryDataToStringAscii(self, value):
    """Converts a binary data value into a string.

    Args:
      value (bytes): binary data value containing an ASCII string or None.

    Returns:
      str: string representation of binary data value or None.
    """
    if value:
      return value.decode('ascii')

    return None

  def _ConvertValueBinaryDataToStringBase16(self, value):
    """Converts a binary data value into a base-16 (hexadecimal) string.

    Args:
      value (bytes): binary data value or None.

    Returns:
      str: string representation of binary data value or None.
    """
    if value:
      # bytes.encode('hex') only exists in Python 2; bytes.hex() is
      # the Python 3 equivalent.
      return value.hex()

    return None

  def _ConvertValueBinaryDataToUBInt64(self, value):
    """Converts a binary data value into an integer.

    Args:
      value (bytes): binary data value containing an unsigned 64-bit big-endian
          integer.

    Returns:
      int: integer representation of binary data value or None.
    """
    if value:
      return self._UINT64_BIG_ENDIAN.parse(value)

    return None

  def _ConvertValueBinaryDataToULInt64(self, value):
    """Converts a binary data value into an integer.

    Args:
      value (bytes): binary data value containing an unsigned 64-bit
          little-endian integer.

    Returns:
      int: integer representation of binary data value or None.
    """
    if value:
      return self._UINT64_LITTLE_ENDIAN.parse(value)

    return None

  def _GetRecordValue(self, record, value_entry):
    """Retrieves a specific value from the record.

    Args:
      record (pyesedb.record): ESE record.
      value_entry (int): value entry.

    Returns:
      object: value.

    Raises:
      ValueError: if the value is not supported.
    """
    column_type = record.get_column_type(value_entry)
    long_value = None

    if record.is_long_value(value_entry):
      long_value = record.get_value_data_as_long_value(value_entry)

    if record.is_multi_value(value_entry):
      # TODO: implement
      raise ValueError('Multi value support not implemented yet.')

    if column_type == pyesedb.column_types.NULL:
      return None

    elif column_type == pyesedb.column_types.BOOLEAN:
      # TODO: implement
      raise ValueError('Boolean value support not implemented yet.')

    elif column_type in self.INTEGER_COLUMN_TYPES:
      if long_value:
        raise ValueError('Long integer value not supported.')
      return record.get_value_data_as_integer(value_entry)

    elif column_type in self.FLOATING_POINT_COLUMN_TYPES:
      if long_value:
        raise ValueError('Long floating point value not supported.')
      return record.get_value_data_as_floating_point(value_entry)

    elif column_type in self.STRING_COLUMN_TYPES:
      if long_value:
        return long_value.get_data_as_string()
      return record.get_value_data_as_string(value_entry)

    elif column_type == pyesedb.column_types.GUID:
      # TODO: implement
      raise ValueError('GUID value support not implemented yet.')

    # Fall through: treat any remaining column type as raw binary data.
    if long_value:
      return long_value.get_data()
    return record.get_value_data(value_entry)

  def _GetRecordValues(
      self, parser_mediator, table_name, record, value_mappings=None):
    """Retrieves the values from the record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table.
      record (pyesedb.record): ESE record.
      value_mappings (Optional[dict[str, str]): value mappings, which map
          the column name to a callback method.

    Returns:
      dict[str,object]: values per column name.
    """
    record_values = {}

    for value_entry in range(0, record.number_of_values):
      if parser_mediator.abort:
        break

      column_name = record.get_column_name(value_entry)
      if column_name in record_values:
        logger.warning(
            '[{0:s}] duplicate column: {1:s} in table: {2:s}'.format(
                self.NAME, column_name, table_name))
        continue

      value_callback = None
      if value_mappings and column_name in value_mappings:
        value_callback_method = value_mappings.get(column_name)
        if value_callback_method:
          value_callback = getattr(self, value_callback_method, None)
          if value_callback is None:
            logger.warning((
                '[{0:s}] missing value callback method: {1:s} for column: '
                '{2:s} in table: {3:s}').format(
                    self.NAME, value_callback_method, column_name, table_name))

      if value_callback:
        try:
          value_data = record.get_value_data(value_entry)
          value = value_callback(value_data)

        except Exception as exception:  # pylint: disable=broad-except
          logger.error(exception)
          value = None
          parser_mediator.ProduceExtractionError((
              'unable to parse value: {0:s} with callback: {1:s} with error: '
              '{2!s}').format(column_name, value_callback_method, exception))

      else:
        try:
          value = self._GetRecordValue(record, value_entry)
        except ValueError as exception:
          value = None
          parser_mediator.ProduceExtractionError(
              'unable to parse value: {0:s} with error: {1!s}'.format(
                  column_name, exception))

      record_values[column_name] = value

    return record_values

  # pylint 1.9.3 wants a docstring for kwargs, but this is not useful to add.
  # pylint: disable=missing-param-doc
  def GetEntries(self, parser_mediator, cache=None, database=None, **kwargs):
    """Extracts event objects from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache.
      database (Optional[pyesedb.file]): ESE database.

    Raises:
      ValueError: If the database attribute is not valid.
    """
    if database is None:
      raise ValueError('Invalid database.')

    for table_name, callback_method in iter(self._tables.items()):
      if parser_mediator.abort:
        break

      if not callback_method:
        # Table names without a callback method are allowed to improve
        # the detection of a database based on its table names.
        continue

      callback = getattr(self, callback_method, None)
      if callback is None:
        logger.warning(
            '[{0:s}] missing callback method: {1:s} for table: {2:s}'.format(
                self.NAME, callback_method, table_name))
        continue

      esedb_table = database.get_table_by_name(table_name)
      if not esedb_table:
        logger.warning('[{0:s}] missing table: {1:s}'.format(
            self.NAME, table_name))
        continue

      # The database is passed in case the database contains table names
      # that are assigned dynamically and cannot be defined by
      # the table name-callback mechanism.
      callback(
          parser_mediator, cache=cache, database=database, table=esedb_table,
          **kwargs)

  # pylint 1.9.3 wants a docstring for kwargs, but this is not useful to add.
  # pylint: disable=missing-param-doc,arguments-differ
  def Process(self, parser_mediator, cache=None, database=None, **kwargs):
    """Determines if this is the appropriate plugin for the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache.
      database (Optional[pyesedb.file]): ESE database.

    Raises:
      ValueError: If the database attribute is not valid.
    """
    if database is None:
      raise ValueError('Invalid database.')

    # This will raise if unhandled keyword arguments are passed.
    super(ESEDBPlugin, self).Process(parser_mediator)

    self.GetEntries(
        parser_mediator, cache=cache, database=database, **kwargs)
Ejemplo n.º 23
0
class RestorePointLogParser(interface.FileObjectParser):
  """A parser for Windows Restore Point (rp.log) files."""

  NAME = u'rplog'
  DESCRIPTION = u'Parser for Windows Restore Point (rp.log) files.'

  FILTERS = frozenset([
      interface.FileNameFileEntryFilter(u'rp.log')])

  _FILE_HEADER_STRUCT = construct.Struct(
      u'file_header',
      construct.ULInt32(u'event_type'),
      construct.ULInt32(u'restore_point_type'),
      construct.ULInt64(u'sequence_number'),
      construct.RepeatUntil(
          lambda character, ctx: character == b'\x00\x00',
          construct.Field(u'description', 2)))

  _FILE_FOOTER_STRUCT = construct.Struct(
      u'file_footer',
      construct.ULInt64(u'creation_time'))

  def _ParseFileHeader(self, file_object):
    """Parses the file header.

    Args:
      file_object: A file-like object to read data from.

    Returns:
      The file header construct object.

    Raises:
      UnableToParseFile: when the header cannot be parsed.
    """
    try:
      file_header = self._FILE_HEADER_STRUCT.parse_stream(file_object)
    except (IOError, construct.FieldError) as exception:
      # Use !s: a non-empty format spec such as :s raises TypeError for
      # exception objects on Python 3.
      raise errors.UnableToParseFile(
          u'Unable to parse file header with error: {0!s}'.format(exception))

    if not file_header:
      raise errors.UnableToParseFile(u'Unable to read file header')

    return file_header

  def _ParseFileFooter(self, file_object):
    """Parses the file footer.

    Args:
      file_object: A file-like object to read data from.

    Returns:
      The file footer construct object.

    Raises:
      UnableToParseFile: when the footer cannot be parsed.
    """
    try:
      file_footer = self._FILE_FOOTER_STRUCT.parse_stream(file_object)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          u'Unable to parse file footer with error: {0!s}'.format(exception))

    if not file_footer:
      raise errors.UnableToParseFile(u'Unable to read file footer')

    return file_footer

  def ParseFileObject(self, parser_mediator, file_object, **unused_kwargs):
    """Parses a Windows Restore Point (rp.log) log file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    file_header = self._ParseFileHeader(file_object)

    try:
      # The struct includes the end-of-string character that we need
      # to strip off.
      description = b''.join(file_header.description).decode(u'utf16')[:-1]
    except UnicodeDecodeError as exception:
      description = u''
      parser_mediator.ProduceParseError((
          u'unable to decode description UTF-16 stream with error: '
          u'{0!s}').format(exception))

    # The creation time footer is stored in the last 8 bytes of the file.
    file_object.seek(-8, os.SEEK_END)
    file_footer = self._ParseFileFooter(file_object)

    timestamp = file_footer.get(u'creation_time', None)
    if timestamp is None:
      parser_mediator.ProduceParseError(u'Timestamp not set.')
    else:
      event_object = RestorePointInfoEvent(
          timestamp, file_header.event_type, file_header.restore_point_type,
          file_header.sequence_number, description)
      parser_mediator.ProduceEvent(event_object)
Ejemplo n.º 24
0
class NTFSUsnJrnlParser(interface.FileObjectParser):
    """Parses a NTFS USN change journal."""

    _INITIAL_FILE_OFFSET = None

    NAME = u'usnjrnl'
    DESCRIPTION = u'Parser for NTFS USN change journal ($UsnJrnl).'

    _USN_RECORD_V2 = construct.Struct(
        u'usn_record_v2', construct.ULInt32(u'size'),
        construct.ULInt16(u'major_version'),
        construct.ULInt16(u'minor_version'),
        construct.ULInt64(u'file_reference'),
        construct.ULInt64(u'parent_file_reference'),
        construct.ULInt64(u'update_sequence_number'),
        construct.ULInt64(u'update_date_time'),
        construct.ULInt32(u'update_reason_flags'),
        construct.ULInt32(u'update_source_flags'),
        construct.ULInt32(u'security_descriptor_identifier'),
        construct.ULInt32(u'file_attribute_flags'),
        construct.ULInt16(u'name_size'), construct.ULInt16(u'name_offset'),
        construct.String(u'name', lambda ctx: ctx.size - 60))

    # TODO: add support for USN_RECORD_V3 when actually seen to be used.

    def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
        """Parses an USN change journal.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      usn_change_journal (pyfsntsfs.usn_change_journal): USN change journal.
    """
        if not usn_change_journal:
            return

        usn_record_data = usn_change_journal.read_usn_record()
        while usn_record_data:
            current_offset = usn_change_journal.get_offset()

            try:
                usn_record_struct = self._USN_RECORD_V2.parse(usn_record_data)
            except (IOError, construct.FieldError) as exception:
                parser_mediator.ProduceExtractionError(
                    (u'unable to parse USN record at offset: 0x{0:08x} '
                     u'with error: {1!s}').format(current_offset, exception))
                # A "continue" here would loop forever on the same record
                # data, since the next record is only read at the end of
                # the loop body. Stop processing the journal instead.
                break

            # The name field starts 60 bytes into the record; name_offset is
            # relative to the start of the record.
            name_offset = usn_record_struct.name_offset - 60
            utf16_stream = usn_record_struct.name[name_offset:usn_record_struct
                                                  .name_size]

            try:
                name_string = utf16_stream.decode(u'utf-16-le')
            except (UnicodeDecodeError, UnicodeEncodeError) as exception:
                name_string = utf16_stream.decode(u'utf-16-le',
                                                  errors=u'replace')
                parser_mediator.ProduceExtractionError((
                    u'unable to decode USN record name string with error: '
                    u'{0!s}. Characters that cannot be decoded will be replaced '
                    u'with "?" or "\\ufffd".').format(exception))

            event_data = NTFSUSNChangeEventData()
            event_data.file_attribute_flags = usn_record_struct.file_attribute_flags
            event_data.file_reference = usn_record_struct.file_reference
            event_data.filename = name_string
            event_data.offset = current_offset
            event_data.parent_file_reference = usn_record_struct.parent_file_reference
            event_data.update_reason_flags = usn_record_struct.update_reason_flags
            event_data.update_sequence_number = (
                usn_record_struct.update_sequence_number)
            event_data.update_source_flags = usn_record_struct.update_source_flags

            if not usn_record_struct.update_date_time:
                date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=usn_record_struct.update_date_time)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            usn_record_data = usn_change_journal.read_usn_record()

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a NTFS $UsnJrnl metadata file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
        volume = pyfsntfs.volume()
        try:
            volume.open_file_object(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to open NTFS volume with error: {0!s}'.format(
                    exception))
            # The volume could not be opened; there is nothing to read and
            # the calls below would fail on an unopened volume.
            return

        try:
            usn_change_journal = volume.get_usn_change_journal()
            self._ParseUSNChangeJournal(parser_mediator, usn_change_journal)
        finally:
            volume.close()
Ejemplo n.º 25
0
class UserAssistCollector(collector.WindowsVolumeCollector):
    """Class that defines a Windows User Assist information collector."""

    _USER_ASSIST_KEY = (
        u'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        u'Explorer\\UserAssist')

    # UserAssist format version used in Windows 2000, XP, 2003, Vista.
    _USER_ASSIST_V3_STRUCT = construct.Struct(
        u'user_assist_entry', construct.ULInt32(u'unknown1'),
        construct.ULInt32(u'execution_count'),
        construct.ULInt64(u'last_execution_time'))

    # UserAssist format version used in Windows 2008, 7, 8.
    _USER_ASSIST_V5_STRUCT = construct.Struct(
        u'user_assist_entry', construct.ULInt32(u'unknown1'),
        construct.ULInt32(u'execution_count'),
        construct.ULInt32(u'application_focus_count'),
        construct.ULInt32(u'application_focus_duration'),
        construct.LFloat32(u'unknown2'), construct.LFloat32(u'unknown3'),
        construct.LFloat32(u'unknown4'), construct.LFloat32(u'unknown5'),
        construct.LFloat32(u'unknown6'), construct.LFloat32(u'unknown7'),
        construct.LFloat32(u'unknown8'), construct.LFloat32(u'unknown9'),
        construct.LFloat32(u'unknown10'), construct.LFloat32(u'unknown11'),
        construct.ULInt32(u'unknown12'),
        construct.ULInt64(u'last_execution_time'),
        construct.ULInt32(u'unknown13'))

    def __init__(self, debug=False, mediator=None):
        """Initializes the collector object.

    Args:
      debug: optional boolean value to indicate if debug information should
             be printed.
      mediator: a volume scanner mediator (instance of
                dfvfs.VolumeScannerMediator) or None.
    """
        super(UserAssistCollector, self).__init__(mediator=mediator)
        self._debug = debug
        registry_file_reader = collector.CollectorRegistryFileReader(self)
        self._registry = registry.WinRegistry(
            registry_file_reader=registry_file_reader)

        # Set to True by Collect() when the UserAssist key was found.
        self.found_user_assist_key = False

    # TODO: replace print by output_writer.
    def _CollectUserAssistFromKey(self, unused_output_writer, guid_sub_key):
        """Collects the User Assist information from a GUID sub key.

    Args:
      output_writer: the output writer object.
      guid_sub_key: the User Assist GUID Registry key (instance of
                    dfwinreg.WinRegistryKey).
    """
        version_value = guid_sub_key.GetValueByName(u'Version')
        if not version_value:
            logging.warning(u'Missing Version value in sub key: {0:s}'.format(
                guid_sub_key.name))
            return

        format_version = version_value.GetDataAsObject()
        if format_version == 3:
            value_data_size = self._USER_ASSIST_V3_STRUCT.sizeof()
        elif format_version == 5:
            value_data_size = self._USER_ASSIST_V5_STRUCT.sizeof()
        else:
            # Guard against a NameError further down: value_data_size and
            # parsed_data are only defined for format versions 3 and 5.
            logging.warning(
                u'Unsupported format version: {0!s} in sub key: {1:s}'.format(
                    format_version, guid_sub_key.name))
            return

        print(u'GUID\t\t: {0:s}'.format(guid_sub_key.name))
        print(u'Format version\t: {0:d}'.format(format_version))
        print(u'')

        count_sub_key = guid_sub_key.GetSubkeyByName(u'Count')
        for value in count_sub_key.GetValues():
            output_string = u'Original name\t: {0:s}'.format(value.name)
            print(output_string.encode(u'utf-8'))

            # Value names are obfuscated with ROT-13; decode characters
            # individually when the name cannot be decoded as a whole.
            try:
                value_name = value.name.decode(u'rot-13')
            except UnicodeEncodeError as exception:
                characters = []
                for char in value.name:
                    if ord(char) < 128:
                        try:
                            characters.append(char.decode(u'rot-13'))
                        except UnicodeEncodeError:
                            characters.append(char)
                    else:
                        characters.append(char)

                value_name = u''.join(characters)

            try:
                output_string = u'Converted name\t: {0:s}'.format(value_name)
                print(output_string.encode(u'utf-8'))
            except UnicodeEncodeError as exception:
                logging.warning(
                    u'Unable to convert: {0:s} with error: {1!s}'.format(
                        value.name, exception))

            print(u'Value data:')
            print(hexdump.Hexdump(value.data))

            # The UEME_CTLSESSION value has a different layout and is skipped.
            if value_name != u'UEME_CTLSESSION':
                if value_data_size != len(value.data):
                    logging.warning(
                        (u'Version: {0:d} size mismatch (calculated: {1:d}, '
                         u'stored: {2:d}).').format(format_version,
                                                    value_data_size,
                                                    len(value.data)))
                    return

                if format_version == 3:
                    parsed_data = self._USER_ASSIST_V3_STRUCT.parse(value.data)
                elif format_version == 5:
                    parsed_data = self._USER_ASSIST_V5_STRUCT.parse(value.data)

                print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    parsed_data.get(u'unknown1')))

                print(u'Execution count\t\t\t\t\t\t\t\t: {0:d}'.format(
                    parsed_data.get(u'execution_count')))

                if format_version == 5:
                    print(
                        u'Application focus count\t\t\t\t\t\t\t: {0:d}'.format(
                            parsed_data.get(u'application_focus_count')))

                    print(u'Application focus duration\t\t\t\t\t\t: {0:d}'.
                          format(
                              parsed_data.get(u'application_focus_duration')))

                    print(u'Unknown2\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown2')))

                    print(u'Unknown3\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown3')))

                    print(u'Unknown4\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown4')))

                    print(u'Unknown5\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown5')))

                    print(u'Unknown6\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown6')))

                    print(u'Unknown7\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown7')))

                    print(u'Unknown8\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown8')))

                    print(u'Unknown9\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown9')))

                    print(u'Unknown10\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown10')))

                    print(u'Unknown11\t\t\t\t\t\t\t\t: {0:.2f}'.format(
                        parsed_data.get(u'unknown11')))

                    print(u'Unknown12\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                        parsed_data.get(u'unknown12')))

                # last_execution_time is a FILETIME (100ns intervals since
                # 1601-01-01), hence the division by 10 to get microseconds.
                timestamp = parsed_data.get(u'last_execution_time')
                date_string = (datetime.datetime(1601, 1, 1) +
                               datetime.timedelta(microseconds=timestamp / 10))

                print(u'Last execution time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.
                      format(date_string, timestamp))

                if format_version == 5:
                    print(u'Unknown13\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                        parsed_data.get(u'unknown13')))

                print(u'')

    def Collect(self, output_writer):
        """Collects the User Assist information.

    Args:
      output_writer: the output writer object.
    """
        self.found_user_assist_key = False

        user_assist_key = self._registry.GetKeyByPath(self._USER_ASSIST_KEY)
        if not user_assist_key:
            return

        self.found_user_assist_key = True

        print(u'Key: {0:s}'.format(self._USER_ASSIST_KEY))
        print(u'')

        for guid_sub_key in user_assist_key.GetSubkeys():
            self._CollectUserAssistFromKey(output_writer, guid_sub_key)
Ejemplo n.º 26
0
class IndexFile(object):
  """Class that contains an index file."""

  SIGNATURE = 0xc103cac3

  _FILE_HEADER = construct.Struct(
      u'chrome_cache_index_file_header',
      construct.ULInt32(u'signature'),
      construct.ULInt16(u'minor_version'),
      construct.ULInt16(u'major_version'),
      construct.ULInt32(u'number_of_entries'),
      construct.ULInt32(u'stored_data_size'),
      construct.ULInt32(u'last_created_file_number'),
      construct.ULInt32(u'unknown1'),
      construct.ULInt32(u'unknown2'),
      construct.ULInt32(u'table_size'),
      construct.ULInt32(u'unknown3'),
      construct.ULInt32(u'unknown4'),
      construct.ULInt64(u'creation_time'),
      construct.Padding(208))

  _LRU_DATA = construct.Struct(
      u'chrome_cache_index_file_lru_data',
      construct.Padding(8),
      construct.ULInt32(u'filled_flag'),
      construct.Array(5, construct.ULInt32(u'sizes')),
      construct.Array(5, construct.ULInt32(u'head_addresses')),
      construct.Array(5, construct.ULInt32(u'tail_addresses')),
      construct.ULInt32(u'transaction_address'),
      construct.ULInt32(u'operation'),
      construct.ULInt32(u'operation_list'),
      construct.Padding(28))

  def __init__(self, debug=False):
    """Initializes the index file object.

    Args:
      debug (Optional[bool]): True if debug information should be printed.
    """
    super(IndexFile, self).__init__()
    self._debug = debug
    self._file_object = None
    self._file_object_opened_in_object = False
    self.creation_time = None
    self.version = None
    self.index_table = {}

  def _ReadFileHeader(self):
    """Reads the file header.

    Raises:
      IOError: if the file header cannot be read.
    """
    if self._debug:
      print(u'Seeking file header offset: 0x{0:08x}'.format(0))

    self._file_object.seek(0, os.SEEK_SET)

    file_header_data = self._file_object.read(self._FILE_HEADER.sizeof())

    if self._debug:
      print(u'Index file header data:')
      print(hexdump.Hexdump(file_header_data))

    try:
      file_header = self._FILE_HEADER.parse(file_header_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse file header with error: {0:s}'.format(
          exception))

    signature = file_header.get(u'signature')

    if signature != self.SIGNATURE:
      raise IOError(u'Unsupported index file signature')

    self.version = u'{0:d}.{1:d}'.format(
        file_header.get(u'major_version'),
        file_header.get(u'minor_version'))

    if self.version not in [u'2.0', u'2.1']:
      raise IOError(u'Unsupported index file version: {0:s}'.format(
          self.version))

    self.creation_time = file_header.get(u'creation_time')

    if self._debug:
      print(u'Signature\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(signature))

      print(u'Version\t\t\t\t\t\t\t\t\t: {0:s}'.format(self.version))

      print(u'Number of entries\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'number_of_entries')))

      print(u'Stored data size\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'stored_data_size')))

      print(u'Last created file number\t\t\t\t\t\t: f_{0:06x}'.format(
          file_header.get(u'last_created_file_number')))

      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown1')))

      print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown2')))

      print(u'Table size\t\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'table_size')))

      print(u'Unknown3\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown3')))

      print(u'Unknown4\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown4')))

      date_string = (
          datetime.datetime(1601, 1, 1) +
          datetime.timedelta(microseconds=self.creation_time))

      print(u'Creation time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, self.creation_time))

      print(u'')

  def _ReadLruData(self):
    """Reads the LRU data."""
    lru_data = self._file_object.read(self._LRU_DATA.sizeof())

    if self._debug:
      print(u'Index file LRU data:')
      print(hexdump.Hexdump(lru_data))

    try:
      index_file_lru = self._LRU_DATA.parse(lru_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse LRU data with error: {0:s}'.format(
          exception))

    if self._debug:
      print(u'Filled flag\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          index_file_lru.get(u'filled_flag')))

      for value in index_file_lru.get(u'sizes'):
        print(u'Size\t\t\t\t\t\t\t\t\t: {0:d}'.format(value))

      cache_address_index = 0
      for value in index_file_lru.get(u'head_addresses'):
        cache_address = CacheAddress(value)
        print(u'Head address: {0:d}\t\t\t\t\t\t\t\t: {1:s}'.format(
            cache_address_index, cache_address.GetDebugString()))
        cache_address_index += 1

      cache_address_index = 0
      for value in index_file_lru.get(u'tail_addresses'):
        cache_address = CacheAddress(value)
        print(u'Tail address: {0:d}\t\t\t\t\t\t\t\t: {1:s}'.format(
            cache_address_index, cache_address.GetDebugString()))
        cache_address_index += 1

      cache_address = CacheAddress(index_file_lru.get(u'transaction_address'))
      print(u'Transaction address\t\t\t\t\t\t\t: {0:s}'.format(
          cache_address.GetDebugString()))

      print(u'Operation\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          index_file_lru.get(u'operation')))

      print(u'Operation list\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          index_file_lru.get(u'operation_list')))

      print(u'')

  def _ReadIndexTable(self):
    """Reads the index table of cache addresses from the current file offset.

    Populates self.index_table with a CacheAddress per non-zero slot.
    """
    table_index = 0
    entry_data = self._file_object.read(4)

    # Keep consuming 4-byte entries until a short (or empty) read signals
    # the end of the table.
    while len(entry_data) == 4:
      address_value = construct.ULInt32(u'cache_address').parse(entry_data)

      # A zero value marks an unused table slot; only non-zero cache
      # addresses are recorded. The slot index advances either way.
      if address_value:
        entry = CacheAddress(address_value)

        if self._debug:
          print(u'Cache address: {0:d}\t\t\t\t\t\t\t: {1:s}'.format(
              table_index, entry.GetDebugString()))

        self.index_table[table_index] = entry

      table_index += 1
      entry_data = self._file_object.read(4)

    if self._debug:
      print(u'')

  def Close(self):
    """Closes the index file.

    The underlying file object is only closed when it was opened by this
    object; externally supplied file-like objects are left open. In both
    cases the reference to the file object is dropped.
    """
    if self._file_object_opened_in_object:
      self._file_object.close()
    self._file_object = None

  def Open(self, filename):
    """Opens the index file.

    Args:
      filename (str): path of the file.
    """
    file_object = open(filename, 'rb')
    self._file_object = file_object
    self._file_object_opened_in_object = True

    # Read the file header first, then the LRU data and the index table
    # that follow it.
    self._ReadFileHeader()
    self._ReadLruData()
    self._ReadIndexTable()

  def OpenFileObject(self, file_object):
    """Opens the index file from an externally managed file-like object.

    The caller retains ownership of the file-like object; Close() will not
    close it.

    Args:
      file_object (file): file-like object.
    """
    self._file_object_opened_in_object = False
    self._file_object = file_object

    self._ReadFileHeader()
    self._ReadLruData()
    self._ReadIndexTable()
Ejemplo n.º 27
0
    def ParseCachedEntry(self, format_type, value_data, cached_entry_offset,
                         cached_entry_size):
        """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the cached entry data relative to the start of
                           the value data.
      cached_entry_size: integer value that contains the cached entry data size.

    Returns:
      A cached entry object (instance of AppCompatCacheCachedEntry).

    Raises:
      RuntimeError: if the format type is not supported or the cached
                    entry size or signature is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        # Slice the raw bytes of this cached entry out of the value data.
        cached_entry_data = value_data[
            cached_entry_offset:cached_entry_offset + cached_entry_size]

        cached_entry_struct = None

        # Select the parsing struct by matching the entry size against the
        # known 32-bit and 64-bit layouts for each format type; an
        # unrecognized size leaves cached_entry_struct as None and is
        # rejected below.
        if format_type == self.FORMAT_TYPE_XP:
            if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_2003:
            if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_7:
            if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
                cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_8:
            # Format 8 entries carry a 4-byte signature instead of being
            # identified by a fixed size.
            if cached_entry_data[0:4] not in [
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1
            ]:
                raise RuntimeError(u'Unsupported cache entry signature')

            if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
                    cached_entry_data)

                # The header declares the size of the variable-length data
                # that follows; re-slice the entry to cover header (12 bytes)
                # plus data.
                cached_entry_data_size = cached_entry_struct.get(
                    'cached_entry_data_size')
                cached_entry_size = 12 + cached_entry_data_size

                cached_entry_data = value_data[
                    cached_entry_offset:cached_entry_offset +
                    cached_entry_size]

        if not cached_entry_struct:
            raise RuntimeError(u'Unsupported cache entry size: {0:d}'.format(
                cached_entry_size))

        cached_entry_object = AppCompatCacheCachedEntry()
        cached_entry_object.cached_entry_size = cached_entry_size

        path_offset = 0
        data_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            # XP stores the path inline as a NUL-terminated UTF-16 string in
            # a fixed-size field; scan for the 2-byte terminator within the
            # first 528 bytes.
            string_size = 0
            for string_index in xrange(0, 528, 2):
                if (ord(cached_entry_data[string_index]) == 0
                        and ord(cached_entry_data[string_index + 1]) == 0):
                    break
                string_size += 2

            cached_entry_object.path = binary.Ut16StreamCopyToString(
                cached_entry_data[0:string_size])

        elif format_type in [
                self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                self.FORMAT_TYPE_7
        ]:
            # These formats reference the path by offset/size into the value
            # data; the string is copied out further below.
            path_size = cached_entry_struct.get('path_size')
            path_offset = cached_entry_struct.get('path_offset')

        elif format_type == self.FORMAT_TYPE_8:
            path_size = cached_entry_struct.get('path_size')

            # Format 8: path starts at offset 14 within the entry, followed
            # by insertion and shim flags.
            cached_entry_data_offset = 14 + path_size
            cached_entry_object.path = binary.Ut16StreamCopyToString(
                cached_entry_data[14:cached_entry_data_offset])

            remaining_data = cached_entry_data[cached_entry_data_offset:]

            cached_entry_object.insertion_flags = construct.ULInt32(
                'insertion_flags').parse(remaining_data[0:4])
            cached_entry_object.shim_flags = construct.ULInt32(
                'shim_flags').parse(remaining_data[4:8])

            # The 8.0 and 8.1 signatures differ in the amount of data
            # preceding the timestamp.
            if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
                cached_entry_data_offset += 8

            elif cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
                cached_entry_data_offset += 10

            remaining_data = cached_entry_data[cached_entry_data_offset:]

        if format_type in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7
        ]:
            cached_entry_object.last_modification_time = cached_entry_struct.get(
                'last_modification_time')

        elif format_type == self.FORMAT_TYPE_8:
            cached_entry_object.last_modification_time = construct.ULInt64(
                'last_modification_time').parse(remaining_data[0:8])

        if format_type in [self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003]:
            cached_entry_object.file_size = cached_entry_struct.get(
                'file_size')

        elif format_type in [self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
            cached_entry_object.insertion_flags = cached_entry_struct.get(
                'insertion_flags')
            cached_entry_object.shim_flags = cached_entry_struct.get(
                'shim_flags')

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_object.last_update_time = cached_entry_struct.get(
                'last_update_time')

        if format_type == self.FORMAT_TYPE_7:
            data_offset = cached_entry_struct.get('data_offset')
            data_size = cached_entry_struct.get('data_size')

        elif format_type == self.FORMAT_TYPE_8:
            # Format 8: the data size sits after the flags/timestamp block;
            # the data itself lives after it in the value data.
            data_offset = cached_entry_offset + cached_entry_data_offset + 12
            data_size = construct.ULInt32('data_size').parse(
                remaining_data[8:12])

        # For 2003/Vista/7 the path is resolved here from the value data.
        # Note: path_offset stays 0 for XP and format 8, so this block is
        # skipped for those formats.
        if path_offset > 0 and path_size > 0:
            path_size += path_offset

            cached_entry_object.path = binary.Ut16StreamCopyToString(
                value_data[path_offset:path_size])

        if data_size > 0:
            data_size += data_offset

            cached_entry_object.data = value_data[data_offset:data_size]

        return cached_entry_object
Ejemplo n.º 28
0
class DataBlockFile(object):
  """Class that contains a data block file."""

  # Expected value of the file header 'signature' field.
  SIGNATURE = 0xc104cac3

  # TODO: update emtpy, hints, updating and user.
  # On-disk layout of the data block file header; the allocation bitmap
  # tracks which blocks in the file are in use.
  _FILE_HEADER = construct.Struct(
      u'chrome_cache_data_file_header',
      construct.ULInt32(u'signature'),
      construct.ULInt16(u'minor_version'),
      construct.ULInt16(u'major_version'),
      construct.ULInt16(u'file_number'),
      construct.ULInt16(u'next_file_number'),
      construct.ULInt32(u'block_size'),
      construct.ULInt32(u'number_of_entries'),
      construct.ULInt32(u'maximum_number_of_entries'),
      construct.Array(4, construct.ULInt32(u'emtpy')),
      construct.Array(4, construct.ULInt32(u'hints')),
      construct.ULInt32(u'updating'),
      construct.Array(5, construct.ULInt32(u'user')),
      construct.Array(2028, construct.ULInt32(u'allocation_bitmap')))

  # On-disk layout of a cache entry stored inside a data block; the key is
  # a fixed 160-byte field read as individual bytes and NUL-terminated in
  # ReadCacheEntry.
  _CACHE_ENTRY = construct.Struct(
      u'chrome_cache_entry',
      construct.ULInt32(u'hash'),
      construct.ULInt32(u'next_address'),
      construct.ULInt32(u'rankings_node_address'),
      construct.ULInt32(u'reuse_count'),
      construct.ULInt32(u'refetch_count'),
      construct.ULInt32(u'state'),
      construct.ULInt64(u'creation_time'),
      construct.ULInt32(u'key_size'),
      construct.ULInt32(u'long_key_address'),
      construct.Array(4, construct.ULInt32(u'data_stream_sizes')),
      construct.Array(4, construct.ULInt32(u'data_stream_addresses')),
      construct.ULInt32(u'flags'),
      construct.Padding(16),
      construct.ULInt32(u'self_hash'),
      construct.Array(160, construct.UBInt8(u'key')))

  def __init__(self, debug=False):
    """Initializes the data block file object.

    Args:
      debug (Optional[bool]): True if debug information should be printed.
    """
    super(DataBlockFile, self).__init__()
    self._debug = debug
    # File handle state; _file_object_opened_in_object records whether
    # Close() owns (and must close) the handle.
    self._file_object = None
    self._file_object_opened_in_object = False
    # Values filled in by _ReadFileHeader() and ReadCacheEntry().
    self.version = None
    self.block_size = None
    self.number_of_entries = None
    self.creation_time = None

  def _ReadFileHeader(self):
    """Reads the file header.

    Sets the version, block_size and number_of_entries attributes from
    the header values.

    Raises:
      IOError: if the file header cannot be read, the signature does not
               match or the format version is unsupported.
    """
    if self._debug:
      print(u'Seeking file header offset: 0x{0:08x}'.format(0))

    self._file_object.seek(0, os.SEEK_SET)

    file_header_data = self._file_object.read(self._FILE_HEADER.sizeof())

    if self._debug:
      print(u'Data block file header data:')
      print(hexdump.Hexdump(file_header_data))

    try:
      file_header = self._FILE_HEADER.parse(file_header_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse file header with error: {0:s}'.format(
          exception))

    signature = file_header.get(u'signature')

    if signature != self.SIGNATURE:
      raise IOError(u'Unsupported data block file signature')

    # Compute the version string once; it was previously computed twice
    # with identical values.
    self.version = u'{0:d}.{1:d}'.format(
        file_header.get(u'major_version'),
        file_header.get(u'minor_version'))

    if self.version not in [u'2.0', u'2.1']:
      raise IOError(u'Unsupported data block file version: {0:s}'.format(
          self.version))

    self.block_size = file_header.get(u'block_size')
    self.number_of_entries = file_header.get(u'number_of_entries')

    if self._debug:
      print(u'Signature\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(signature))

      print(u'Version\t\t\t\t\t\t\t\t\t: {0:s}'.format(self.version))

      print(u'File number\t\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'file_number')))

      print(u'Next file number\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'next_file_number')))

      print(u'Block size\t\t\t\t\t\t\t\t: {0:d}'.format(self.block_size))

      print(u'Number of entries\t\t\t\t\t\t\t: {0:d}'.format(
          self.number_of_entries))

      print(u'Maximum number of entries\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'maximum_number_of_entries')))

      # TODO: print emtpy, hints, updating and user.

      # Walk the allocation bitmap bit by bit and print contiguous runs of
      # allocated blocks.
      block_number = 0
      block_range_start = 0
      block_range_end = 0
      in_block_range = False
      for value_32bit in file_header.get(u'allocation_bitmap'):
        for unused_bit in range(0, 32):
          if value_32bit & 0x00000001:
            if not in_block_range:
              block_range_start = block_number
              block_range_end = block_number
              in_block_range = True

            block_range_end += 1

          elif in_block_range:
            in_block_range = False

            # This whole section is already guarded by self._debug above;
            # the previous redundant inner check was removed.
            print(u'Block range\t: {0:d} - {1:d} ({2:d})'.format(
                block_range_start, block_range_end,
                block_range_end - block_range_start))

          value_32bit >>= 1
          block_number += 1

      print(u'')

  def ReadCacheEntry(self, block_offset):
    """Reads a cache entry.

    Args:
      block_offset (int): offset of the block that contains the cache entry.

    Returns:
      CacheEntry: the cache entry read from the block.

    Raises:
      IOError: if the cache entry cannot be parsed.
    """
    # Note: the docstring above was previously terminated with "" instead
    # of """, which made the remainder of the method part of a string
    # literal and broke the class definition.
    if self._debug:
      print(u'Seeking cache entry offset: 0x{0:08x}'.format(block_offset))

    self._file_object.seek(block_offset, os.SEEK_SET)

    cache_entry_data = self._file_object.read(self._CACHE_ENTRY.sizeof())

    if self._debug:
      print(u'Data block file cache entry data:')
      print(hexdump.Hexdump(cache_entry_data))

    try:
      cache_entry_struct = self._CACHE_ENTRY.parse(cache_entry_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse cache entry with error: {0:s}'.format(
          exception))

    cache_entry = CacheEntry()

    cache_entry.hash = cache_entry_struct.get(u'hash')

    cache_entry.next = CacheAddress(cache_entry_struct.get(u'next_address'))
    cache_entry.rankings_node = CacheAddress(cache_entry_struct.get(
        u'rankings_node_address'))

    cache_entry.creation_time = cache_entry_struct.get(u'creation_time')

    # The key is stored as a fixed-size byte array; the logical key is the
    # prefix up to the first NUL byte.
    byte_array = cache_entry_struct.get(u'key')
    byte_string = b''.join(map(chr, byte_array))
    cache_entry.key, _, _ = byte_string.partition(b'\x00')

    if self._debug:
      print(u'Hash\t\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(cache_entry.hash))

      print(u'Next address\t\t\t\t\t\t\t\t: {0:s}'.format(
          cache_entry.next.GetDebugString()))

      print(u'Rankings node address\t\t\t\t\t\t\t: {0:s}'.format(
          cache_entry.rankings_node.GetDebugString()))

      print(u'Reuse count\t\t\t\t\t\t\t\t: {0:d}'.format(
          cache_entry_struct.get(u'reuse_count')))

      print(u'Refetch count\t\t\t\t\t\t\t\t: {0:d}'.format(
          cache_entry_struct.get(u'refetch_count')))

      print(u'State\t\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cache_entry_struct.get(u'state')))

      # Creation time is stored as microseconds since 1601-01-01 (FILETIME
      # epoch expressed in microseconds).
      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=cache_entry.creation_time))

      print(u'Creation time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, cache_entry.creation_time))

      for value in cache_entry_struct.get(u'data_stream_sizes'):
        print(u'Data stream size\t\t\t\t\t\t\t: {0:d}'.format(value))

      cache_address_index = 0
      for value in cache_entry_struct.get(u'data_stream_addresses'):
        cache_address = CacheAddress(value)
        print(u'Data stream address: {0:d}\t\t\t\t\t\t\t: {1:s}'.format(
            cache_address_index, cache_address.GetDebugString()))
        cache_address_index += 1

      print(u'Flags\t\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cache_entry_struct.get(u'flags')))

      print(u'Self hash\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cache_entry_struct.get(u'self_hash')))

      try:
        cache_entry_key = cache_entry.key.decode(u'ascii')
      except UnicodeDecodeError:
        logging.warning((
            u'Unable to decode cache entry key at cache address: '
            u'0x{0:08x}. Characters that cannot be decoded will be '
            u'replaced with "?" or "\\ufffd".').format(cache_address.value))
        cache_entry_key = cache_entry.key.decode(u'ascii', errors=u'replace')

      print(u'Key\t\t\t\t\t\t\t\t\t: {0:s}'.format(cache_entry_key))

      # TODO: calculate and verify hash.

      print(u'')

    return cache_entry

  def Close(self):
    """Closes the data block file.

    Only closes the underlying file object when this object opened it;
    externally supplied file-like objects are left open. In both cases the
    reference to the file object is dropped.
    """
    if self._file_object_opened_in_object:
      self._file_object.close()
    self._file_object = None

  def Open(self, filename):
    """Opens the data block file.

    Args:
      filename (str): path of the file.
    """
    file_object = open(filename, 'rb')
    self._file_object = file_object
    self._file_object_opened_in_object = True

    # Validate the header immediately so a bad file fails fast.
    self._ReadFileHeader()

  def OpenFileObject(self, file_object):
    """Opens the data block file.

    Args:
      file_object (file): file-like object.
Ejemplo n.º 29
0
class NTFSUsnJrnlParser(interface.FileObjectParser):
  """Parses a NTFS USN change journal."""

  _INITIAL_FILE_OFFSET = None

  NAME = u'usnjrnl'
  DESCRIPTION = u'Parser for NTFS USN change journal ($UsnJrnl).'

  # On-disk layout of a USN_RECORD_V2; the variable-length name string
  # starts at offset 60.
  _USN_RECORD_V2 = construct.Struct(
      u'usn_record_v2',
      construct.ULInt32(u'size'),
      construct.ULInt16(u'major_version'),
      construct.ULInt16(u'minor_version'),
      construct.ULInt64(u'file_reference'),
      construct.ULInt64(u'parent_file_reference'),
      construct.ULInt64(u'update_sequence_number'),
      construct.ULInt64(u'update_date_time'),
      construct.ULInt32(u'update_reason_flags'),
      construct.ULInt32(u'update_source_flags'),
      construct.ULInt32(u'security_descriptor_identifier'),
      construct.ULInt32(u'file_attribute_flags'),
      construct.ULInt16(u'name_size'),
      construct.ULInt16(u'name_offset'),
      construct.String(u'name', lambda ctx: ctx.size - 60))

  # TODO: add support for USN_RECORD_V3 when actually seen to be used.

  def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
    """Parses an USN change journal.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      usn_change_journal: An USN change journal object (instance of
                          pyfsntsfs.usn_change_journal).
    """
    if not usn_change_journal:
      return

    usn_record_data = usn_change_journal.read_usn_record()
    while usn_record_data:
      current_offset = usn_change_journal.get_offset()

      try:
        usn_record_struct = self._USN_RECORD_V2.parse(usn_record_data)
      except (IOError, construct.FieldError) as exception:
        parser_mediator.ProduceParseError((
            u'unable to parse USN record at offset: 0x{0:08x} '
            u'with error: {1:s}').format(current_offset, exception))
        # Advance to the next record before continuing; previously the loop
        # continued without reading, retrying the same unparsable data
        # forever (infinite loop).
        usn_record_data = usn_change_journal.read_usn_record()
        continue

      # The name string starts at offset 60 within the record; name_offset
      # is relative to the record start. NOTE(review): the slice end uses
      # name_size as an absolute index, which is only correct when
      # name_offset is 60 — confirm against records with a larger offset.
      name_offset = usn_record_struct.name_offset - 60
      utf16_stream = usn_record_struct.name[
          name_offset:usn_record_struct.name_size]

      try:
        name_string = utf16_stream.decode(u'utf-16-le')
      except (UnicodeDecodeError, UnicodeEncodeError) as exception:
        name_string = utf16_stream.decode(u'utf-16-le', errors=u'replace')
        parser_mediator.ProduceParseError((
            u'unable to decode USN record name string with error: '
            u'{0:s}. Characters that cannot be decoded will be replaced '
            u'with "?" or "\\ufffd".').format(exception))

      event_object = file_system_events.NTFSUSNChangeEvent(
          usn_record_struct.update_date_time, current_offset,
          name_string, usn_record_struct.file_reference,
          usn_record_struct.update_sequence_number,
          usn_record_struct.update_source_flags,
          usn_record_struct.update_reason_flags,
          file_attribute_flags=usn_record_struct.file_attribute_flags,
          parent_file_reference=usn_record_struct.parent_file_reference)
      parser_mediator.ProduceEvent(event_object)

      usn_record_data = usn_change_journal.read_usn_record()

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a NTFS $UsnJrnl metadata file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.
    """
    volume = pyfsntfs.volume()
    try:
      volume.open_file_object(file_object)
    except IOError as exception:
      parser_mediator.ProduceParseError(
          u'unable to open NTFS volume with error: {0:s}'.format(exception))
      # Without an opened volume there is no journal to read; previously
      # execution fell through to get_usn_change_journal() on the unopened
      # volume.
      return

    try:
      usn_change_journal = volume.get_usn_change_journal()
      self._ParseUSNChangeJournal(parser_mediator, usn_change_journal)
    finally:
      volume.close()
Ejemplo n.º 30
0
class WinPrefetchParser(parser.BaseParser):
    """A parser for Windows Prefetch files."""

    NAME = 'prefetch'

    # Expected value of the 4-byte signature field in the file header.
    FILE_SIGNATURE = 'SCCA'

    # Prefetch file header, common to all supported format versions.
    FILE_HEADER_STRUCT = construct.Struct('file_header',
                                          construct.ULInt32('version'),
                                          construct.String('signature', 4),
                                          construct.Padding(4),
                                          construct.ULInt32('file_size'),
                                          construct.String('executable', 60),
                                          construct.ULInt32('prefetch_hash'),
                                          construct.ULInt32('flags'))

    # File information structure for format version 17 (Windows XP/2003).
    FILE_INFORMATION_V17 = construct.Struct(
        'file_information_v17', construct.Padding(16),
        construct.ULInt32('filenames_offset'),
        construct.ULInt32('filenames_size'),
        construct.ULInt32('volumes_information_offset'),
        construct.ULInt32('number_of_volumes'),
        construct.ULInt32('volumes_information_size'),
        construct.ULInt64('last_run_time'), construct.Padding(16),
        construct.ULInt32('run_count'), construct.Padding(4))

    # File information structure for format version 23 (Windows Vista/7).
    FILE_INFORMATION_V23 = construct.Struct(
        'file_information_v23', construct.Padding(16),
        construct.ULInt32('filenames_offset'),
        construct.ULInt32('filenames_size'),
        construct.ULInt32('volumes_information_offset'),
        construct.ULInt32('number_of_volumes'),
        construct.ULInt32('volumes_information_size'), construct.Padding(8),
        construct.ULInt64('last_run_time'), construct.Padding(16),
        construct.ULInt32('run_count'), construct.Padding(84))

    # File information structure for format version 26 (Windows 8), which
    # stores up to 8 last run times.
    FILE_INFORMATION_V26 = construct.Struct(
        'file_information_v26', construct.Padding(16),
        construct.ULInt32('filenames_offset'),
        construct.ULInt32('filenames_size'),
        construct.ULInt32('volumes_information_offset'),
        construct.ULInt32('number_of_volumes'),
        construct.ULInt32('volumes_information_size'), construct.Padding(8),
        construct.ULInt64('last_run_time'),
        construct.ULInt64('last_run_time1'),
        construct.ULInt64('last_run_time2'),
        construct.ULInt64('last_run_time3'),
        construct.ULInt64('last_run_time4'),
        construct.ULInt64('last_run_time5'),
        construct.ULInt64('last_run_time6'),
        construct.ULInt64('last_run_time7'), construct.Padding(16),
        construct.ULInt32('run_count'), construct.Padding(96))

    # Volume information structure for format version 17.
    VOLUME_INFORMATION_V17 = construct.Struct(
        'volume_information_v17', construct.ULInt32('device_path_offset'),
        construct.ULInt32('device_path_number_of_characters'),
        construct.ULInt64('creation_time'), construct.ULInt32('serial_number'),
        construct.Padding(8), construct.ULInt32('directory_strings_offset'),
        construct.ULInt32('number_of_directory_strings'), construct.Padding(4))

    # Note that at the moment for the purpose of this parser
    # the v23 and v26 volume information structures are the same.
    VOLUME_INFORMATION_V23 = construct.Struct(
        'volume_information_v23', construct.ULInt32('device_path_offset'),
        construct.ULInt32('device_path_number_of_characters'),
        construct.ULInt64('creation_time'), construct.ULInt32('serial_number'),
        construct.Padding(8), construct.ULInt32('directory_strings_offset'),
        construct.ULInt32('number_of_directory_strings'),
        construct.Padding(68))

    def _ParseFileHeader(self, file_object):
        """Parses the file header.

    Args:
      file_object: A file-like object to read data from.

    Returns:
      The file header construct object.

    Raises:
      UnableToParseFile: if the header cannot be read or the signature
                         does not match.
    """
        try:
            header = self.FILE_HEADER_STRUCT.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse file header with error: {0:s}'.format(
                    exception))

        # parse_stream can yield a false-y result on a truncated stream.
        if not header:
            raise errors.UnableToParseFile('Unable to read file header')

        # Only files carrying the expected 'SCCA' signature are supported.
        if header.get('signature', None) != self.FILE_SIGNATURE:
            raise errors.UnableToParseFile('Unsupported file signature')

        return header

    def _ParseFileInformation(self, file_object, format_version):
        """Parses the file information.

    Args:
      file_object: A file-like object to read data from.
      format_version: The format version.

    Returns:
      The file information construct object.

    Raises:
      UnableToParseFile: if the file information cannot be read or the
                         format version is unsupported.
    """
        # Map each supported format version to its information structure;
        # unsupported versions fall through to None and are rejected below.
        information_structs = {
            17: self.FILE_INFORMATION_V17,
            23: self.FILE_INFORMATION_V23,
            26: self.FILE_INFORMATION_V26}

        information_struct = information_structs.get(format_version, None)

        try:
            if information_struct is not None:
                file_information = information_struct.parse_stream(file_object)
            else:
                file_information = None
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse file information v{0:d} with error: {1:s}'.
                format(format_version, exception))

        if not file_information:
            raise errors.UnableToParseFile('Unable to read file information')
        return file_information

    def _ParseFilenames(self, file_object, file_information):
        """Parses the filenames.

    Args:
      file_object: A file-like object to read data from.
      file_information: The file information construct object.

    Returns:
      A list of filenames.
    """
        offset = file_information.get('filenames_offset', 0)
        size = file_information.get('filenames_size', 0)

        # Without a valid offset and size there are no filenames to read.
        if offset <= 0 or size <= 0:
            return []

        file_object.seek(offset, os.SEEK_SET)
        filenames_data = file_object.read(size)
        return binary.ArrayOfUt16StreamCopyToString(filenames_data)

    def _ParseVolumesInformationSection(self, file_object, format_version,
                                        file_information):
        """Parses the volumes information section.

    Args:
      file_object: A file-like object to read data from.
      format_version: The format version.
      file_information: The file information construct object.

    Yields:
      A volume information construct object.

    Raises:
      UnableToParseFile: if a volume information structure cannot be read.
    """
        section_offset = file_information.get('volumes_information_offset', 0)

        # No section offset means there is nothing to yield.
        if section_offset <= 0:
            return

        # The v23 layout is also used for v26 (see VOLUME_INFORMATION_V23).
        if format_version == 17:
            volume_struct = self.VOLUME_INFORMATION_V17
        else:
            volume_struct = self.VOLUME_INFORMATION_V23

        file_object.seek(section_offset, os.SEEK_SET)

        for _ in range(file_information.get('number_of_volumes', 0)):
            try:
                yield volume_struct.parse_stream(file_object)
            except (IOError, construct.FieldError) as exception:
                raise errors.UnableToParseFile((
                    u'Unable to parse volume information v{0:d} with error: '
                    u'{1:s}').format(format_version, exception))

    def _ParseVolumeDevicePath(self, file_object, file_information,
                               volume_information):
        """Parses the volume device path.

    This function expects the current offset of the file-like object to point
    as the end of the volume information structure.

    Args:
      file_object: A file-like object to read data from.
      file_information: The file information construct object.
      volume_information: The volume information construct object.

    Returns:
      A Unicode string containing the device path or None if not available.
    """
        section_offset = file_information.get('volumes_information_offset', 0)
        if section_offset <= 0:
            return None

        path_offset = volume_information.get('device_path_offset', 0)
        # The path length is stored in UTF-16 characters (2 bytes each).
        path_size = 2 * volume_information.get(
            'device_path_number_of_characters', 0)

        # Offsets below 36 would fall inside the volume information
        # structure itself.
        if path_offset < 36 or path_size <= 0:
            return None

        # Remember the current position so the caller's offset is restored
        # after the out-of-band read.
        saved_offset = file_object.tell()

        file_object.seek(path_offset + section_offset, os.SEEK_SET)
        device_path = binary.ReadUtf16Stream(
            file_object, byte_size=path_size)

        file_object.seek(saved_offset, os.SEEK_SET)
        return device_path

    def Parse(self, file_entry):
        """Extracts events from a Windows Prefetch file.

    Args:
      file_entry: A file entry object (instance of dfvfs.FileEntry).

    Yields:
      Event objects (instances of EventObject) of the extracted events.
    """
        file_object = file_entry.GetFileObject()
        file_header = self._ParseFileHeader(file_object)

        # Only format versions 17 (XP/2003), 23 (Vista/7) and 26 (8) are
        # supported.
        format_version = file_header.get('version', None)
        if format_version not in [17, 23, 26]:
            raise errors.UnableToParseFile(
                u'Unsupported format version: {0:d}'.format(format_version))

        file_information = self._ParseFileInformation(file_object,
                                                      format_version)
        mapped_files = self._ParseFilenames(file_object, file_information)

        executable = binary.Ut16StreamCopyToString(
            file_header.get('executable', u''))

        volume_serial_numbers = []
        volume_device_paths = []
        prefetch_events = []
        path = u''

        # Collect per-volume serial numbers, device paths and creation-time
        # events; also resolve the executable's path relative to its volume.
        for volume_information in self._ParseVolumesInformationSection(
                file_object, format_version, file_information):
            volume_serial_number = volume_information.get('serial_number', 0)
            volume_device_path = self._ParseVolumeDevicePath(
                file_object, file_information, volume_information)

            volume_serial_numbers.append(volume_serial_number)
            volume_device_paths.append(volume_device_path)

            timestamp = volume_information.get('creation_time', 0)
            if timestamp:
                prefetch_events.append(
                    (timestamp, eventdata.EventTimestamp.CREATION_TIME))

            # A mapped file that starts with the volume device path and ends
            # with the executable name yields the executable's path on that
            # volume (note: keeps the last match if several volumes match).
            for mapped_file in mapped_files:
                if (mapped_file.startswith(volume_device_path)
                        and mapped_file.endswith(executable)):
                    _, _, path = mapped_file.partition(volume_device_path)

        for prefetch_timestamp, prefetch_description in prefetch_events:
            yield WinPrefetchExecutionEvent(prefetch_timestamp,
                                            prefetch_description, file_header,
                                            file_information, mapped_files,
                                            path, volume_serial_numbers,
                                            volume_device_paths)

        # The most recent last run time is present in all format versions.
        timestamp = file_information.get('last_run_time', 0)
        if timestamp:
            yield WinPrefetchExecutionEvent(
                timestamp, eventdata.EventTimestamp.LAST_RUNTIME, file_header,
                file_information, mapped_files, path, volume_serial_numbers,
                volume_device_paths)

        # Check for the 7 older last run time values available in v26.
        if format_version == 26:
            for last_run_time_index in range(1, 8):
                last_run_time_identifier = 'last_run_time{0:d}'.format(
                    last_run_time_index)

                timestamp = file_information.get(last_run_time_identifier, 0)
                if timestamp:
                    yield WinPrefetchExecutionEvent(
                        timestamp, u'Previous {0:s}'.format(
                            eventdata.EventTimestamp.LAST_RUNTIME),
                        file_header, file_information, mapped_files, path,
                        volume_serial_numbers, volume_device_paths)

        file_object.close()