Example No. 1
 def _decode(self, obj, context):
     return '{:08x}-{:04x}-{:04x}-{:04x}-{:s}'.format(
         construct.ULInt32('foo').parse(obj[0:4]),
         construct.ULInt16('foo').parse(obj[4:6]),
         construct.ULInt16('foo').parse(obj[6:8]),
         construct.UBInt16('foo').parse(obj[8:10]),
         obj[10:16].encode('hex'))
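
For reference, a minimal standalone sketch of the same mixed-endian GUID formatting using only the standard library (the helper name is illustrative and not part of the original class):

import binascii
import struct

def guid_bytes_to_string(data):
    # The first three fields are read little-endian, the fourth big-endian,
    # and the trailing 6 bytes are rendered as a plain hex string.
    part1, part2, part3 = struct.unpack('<IHH', data[0:8])
    part4 = struct.unpack('>H', data[8:10])[0]
    return '{:08x}-{:04x}-{:04x}-{:04x}-{:s}'.format(
        part1, part2, part3, part4,
        binascii.hexlify(data[10:16]).decode('ascii'))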
Example No. 2
def MakeGenericParamRow():
    return construct.Struct('GenericParamRow',
        construct.ULInt16('Number'),
        construct.ULInt16('Flags'),
        MDTag.TypeOrMethodDef.parse('Owner'),
        MDTag.StringHeapRef.parse('Name')
    )
Example No. 3
def decode_itempos(itempos):
    """
    Decodes a single itempos and returns extracted information
    """
    itempos_io = StringIO.StringIO(itempos)
    itempos_struct = construct.Struct("itempos",
                                      construct.ULInt16("itempos_size"),
                                      construct.Padding(2),
                                      construct.ULInt32("filesize"),
                                      construct.Bytes("dos_date", 2),
                                      construct.Bytes("dos_time", 2),
                                      construct.ULInt16("file_attr"),
                                      construct.CString("filename")
                                      )
    parse_res = itempos_struct.parse_stream(itempos_io)
    if itempos_io.pos % 2 == 1:
        itempos_io.read(1)
    ext_struct = construct.Struct("ext",
                                  construct.ULInt16("ext_size"),
                                  construct.ULInt16("ext_version")
                                  )
    parse_ext = ext_struct.parse_stream(itempos_io)
    if parse_ext["ext_version"] >= 0x3:
        itempos2_struct = construct.Struct("itempos2",
                                           construct.Padding(2),  # 0004
                                           construct.Padding(2),  # BEEF
                                           construct.Bytes("creation_dos_date", 2),
                                           construct.Bytes("creation_dos_time", 2),
                                           construct.Bytes("access_dos_date", 2),
                                           construct.Bytes("access_dos_time", 2),
                                           construct.Padding(4)
                                           )
        parse_res2 = itempos2_struct.parse_stream(itempos_io)
    unicode_filename = ""
    if parse_ext["ext_version"] >= 0x7:
        itempos3_struct = construct.Struct("itempos3",
                                           construct.ULInt64("file_ref"),
                                           construct.Padding(8),
                                           construct.Padding(2),
                                           construct.Padding(4)
                                           )
        parse_res3 = itempos3_struct.parse_stream(itempos_io)
        unicode_filename = itempos_io.read().decode("utf16")
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char
    elif parse_ext["ext_version"] >= 0x3:
        unicode_filename = itempos_io.read().decode("utf16")
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char

    timestamp_modified = dosdate(parse_res["dos_date"], parse_res["dos_time"]).strftime("%d/%m/%Y %H:%M:%S")
    timestamp_created = dosdate(parse_res2["creation_dos_date"], parse_res2["creation_dos_time"]).strftime(
        "%d/%m/%Y %H:%M:%S")
    timestamp_access = dosdate(parse_res2["access_dos_date"], parse_res2["access_dos_time"]).strftime(
        "%d/%m/%Y %H:%M:%S")

    return [unicode(parse_res["itempos_size"]), unicode(parse_res["filesize"]), timestamp_modified,
            parse_res["filename"], timestamp_created, timestamp_access, unicode_filename]
Example No. 4
def MakeMethodRow():
    return construct.Struct('MethodRow',
        MakeRva('RVA'),
        construct.ULInt16('ImplFlags'),
        construct.ULInt16('Flags'),
        MDTag.StringHeapRef.parse('Name'),
        MDTag.BlobHeapRef.parse('Signature'),
        MDTag.ParamRef.parse('ParamList')
    )
Example No. 5
def MakeAssemblyRefRow():
    return construct.Struct('AssemblyRefRow',
        construct.ULInt16('MajorVersion'),
        construct.ULInt16('MinorVersion'),
        construct.ULInt16('BuildNumber'),
        construct.ULInt16('RevisionNumber'),
        construct.ULInt32('Flags'),
        MDTag.BlobHeapRef.parse('PublicKeyOrToken'),
        MDTag.StringHeapRef.parse('Name'),
        MDTag.StringHeapRef.parse('Culture'),
        MDTag.BlobHeapRef.parse('HashValue')
    )
Example No. 6
def ReadVtableFixups(ClrHeader):
    VTableFixup = construct.Struct('VTableFixup',
        MakeRva('RVA'),
        construct.ULInt16('Count'),
        construct.FlagsEnum(construct.ULInt16('Type'),
            COR_VTABLE_32BIT                           = 0x01, # V-table slots are 32-bits in size.
            COR_VTABLE_64BIT                           = 0x02, # V-table slots are 64-bits in size.
            COR_VTABLE_FROM_UNMANAGED                  = 0x04, # If set, transition from unmanaged.
            COR_VTABLE_FROM_UNMANAGED_RETAIN_APPDOMAIN = 0x08, # If set, transition from unmanaged with keeping the current appdomain.
            COR_VTABLE_CALL_MOST_DERIVED               = 0x10, # Call most derived method described by
        )
    )
    numFixups = ClrHeader.VTableFixups.Size / VTableFixup.sizeof()
    VTableFixups = construct.Array(numFixups, VTableFixup)
    if numFixups == 0: return []
    return VTableFixups.parse(idc.GetManyBytes(ClrHeader.VTableFixups.VA, VTableFixups.sizeof()))
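
With the legacy construct 2.x API used throughout these examples, FlagsEnum parses into a container of booleans, so individual v-table flags can be tested by name. A small sketch (the input bytes below are made up):

import construct

vtable_type = construct.FlagsEnum(
    construct.ULInt16('Type'),
    COR_VTABLE_32BIT=0x01,
    COR_VTABLE_64BIT=0x02,
    COR_VTABLE_FROM_UNMANAGED=0x04)

flags = vtable_type.parse(b'\x05\x00')  # 0x01 | 0x04, little-endian
# flags.COR_VTABLE_32BIT and flags.COR_VTABLE_FROM_UNMANAGED are True,
# flags.COR_VTABLE_64BIT is False.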
Example No. 7
def MakeImplMapRow():
    return construct.Struct('ImplMapRow',
        construct.ULInt16('MappingFlags'),
        MDTag.MemberForwarded.parse('MemberForwarded'),
        MDTag.StringHeapRef.parse('ImportName'),
        MDTag.ModuleRefRId.parse('ImportScope')
    )
Example No. 8
def MakeModuleRow():
    return construct.Struct('ModuleRow',
        construct.ULInt16('Generation'),
        MDTag.StringHeapRef.parse('Name'),
        MDTag.GuidHeapRef.parse('MVId'),
        MDTag.GuidHeapRef.parse('EnCId'),
        MDTag.GuidHeapRef.parse('EnCBaseId')
    )
Example No. 9
 def __init__(s):
     s.header_cmd0 = construct.Struct('CMD0Header',
         construct.UBInt8('magic'),
         construct.UBInt8('unk_0'),
         construct.UBInt8('unk_1'),
         construct.UBInt8('unk_2'),
         construct.UBInt8('unk_3'),
         construct.UBInt8('flags'),
         construct.UBInt8('id_primary'),
         construct.UBInt8('id_secondary'),
         construct.UBInt16('error_code'),
         construct.UBInt16('payload_size_cmd0')
     )
     s.header_cmd1 = construct.Struct('CMD1Header',
         construct.Padding(48)
     )
     s.header_cmd2 = construct.Struct('CMD2Header',
         construct.ULInt16('JDN_base'),
         construct.Padding(2),
         construct.ULInt32('seconds')
     )
     s.header = construct.Struct('CMDHeader',
         construct.ULInt16('packet_type'),
         construct.ULInt16('cmd_id'),
         construct.ULInt16('payload_size'),
         construct.ULInt16('seq_id'),
         construct.Switch('cmd_hdr', lambda ctx: ctx.cmd_id,
             {
                 0 : construct.If(lambda ctx: ctx.payload_size >= s.header_cmd0.sizeof(), construct.Embed(s.header_cmd0)),
                 1 : construct.If(lambda ctx: ctx.payload_size == s.header_cmd1.sizeof(), construct.Embed(s.header_cmd1)),
                 2 : construct.If(lambda ctx: ctx.payload_size == s.header_cmd2.sizeof(), construct.Embed(s.header_cmd2))
             },
             default = construct.Pass
         )
     )
     s.cmd_handlers = {
         0 : s.cmd0,
         1 : s.cmd1,
         2 : s.cmd2
     }
     s.cmd0_handlers = {
         5 : { 6 : s.cmd0_5_6 },
     }
Example No. 10
  def _ReadFileHeader(self, file_object):
    """Reads the file header.

    Args:
      file_object: the file-like object to read from.

    Raises:
      FileFormatError: if file format related errors are detected.
    """
    file_object.seek(0, os.SEEK_SET)
    file_header = self._FILE_HEADER_STRUCT.parse_stream(file_object)
    self._compressed_data_offset = file_object.get_offset()

    if file_header.signature != self._FILE_SIGNATURE:
      raise errors.FileFormatError(
          u'Unsupported file signature: 0x{0:04x}.'.format(
              file_header.signature))

    if file_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
      raise errors.FileFormatError(
          u'Unsupported compression method: {0:d}.'.format(
              file_header.compression_method))

    self.modification_time = file_header.modification_time
    self.operating_system = file_header.operating_system

    if file_header.flags & self._FLAG_FEXTRA:
      extra_field_data_size = construct.ULInt16(
          u'extra_field_data_size').parse_stream(file_object)
      file_object.seek(extra_field_data_size, os.SEEK_CUR)
      self._compressed_data_offset += 2 + extra_field_data_size

    if file_header.flags & self._FLAG_FNAME:
      # Since encoding is set, construct will convert the C string to Unicode.
      # Note that construct 2 does not allow the encoding to be a Unicode
      # string.
      self.original_filename = construct.CString(
          u'original_filename', encoding='iso-8859-1').parse_stream(
              file_object)
      self._compressed_data_offset = file_object.get_offset()

    if file_header.flags & self._FLAG_FCOMMENT:
      # Since encoding is set, construct will convert the C string to Unicode.
      # Note that construct 2 does not allow the encoding to be a Unicode
      # string.
      self.comment = construct.CString(
          u'comment', encoding='iso-8859-1').parse_stream(file_object)
      self._compressed_data_offset = file_object.get_offset()

    if file_header.flags & self._FLAG_FHCRC:
      self._compressed_data_offset += 2

    self._compressed_data_size = (
        file_object.get_size() - (self._compressed_data_offset + 8))
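
The _FLAG_* constants tested above are defined elsewhere in the class and are not part of this snippet; for reference, the standard gzip FLG bits from RFC 1952 are:

# RFC 1952, section 2.3.1 (member header FLG bits):
_FLAG_FTEXT = 0x01     # file is probably ASCII text
_FLAG_FHCRC = 0x02     # a CRC16 of the header follows
_FLAG_FEXTRA = 0x04    # an extra field (2-byte length + data) follows
_FLAG_FNAME = 0x08     # a zero-terminated original file name follows
_FLAG_FCOMMENT = 0x10  # a zero-terminated comment follows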
Example No. 11
 def _decode(self, obj, context):
     el = [
         int(construct.Byte('foo').parse(obj[0:1])),
         (int(construct.ULInt32('foo').parse(obj[1:5])) +
             (int(construct.ULInt16('foo').parse(obj[5:7])) << 32))]
     
     auth_sub_count = construct.Byte('foo').parse(obj[7:8])
     for i in range(0, auth_sub_count):
         el.append(construct.ULInt32('foo').parse(obj[8+i*4:]))
         
     return 'S-' + '-'.join([str(x) for x in el])
Example No. 12
class Wdigest_x64(Wdigest, Mimikatz_x64):
  """TODO: add description."""

  WDIGEST_LIST_ENTRY = construct.Struct('WdigestListEntry',
      construct.ULInt64('previous'),
      construct.ULInt64('next'),
      construct.ULInt32('usage_count'),
      construct.ULInt32('align1'),
      construct.ULInt64('this_entry'),
      construct.ULInt64('luid'),
      construct.ULInt64('flag'),
      construct.ULInt16('user_len'),
      construct.ULInt16('user_max_len'),
      construct.ULInt32('align2'),
      construct.ULInt64('user_string_ptr'),
      construct.ULInt16('domain_len'),
      construct.ULInt16('domain_max_len'),
      construct.ULInt32('align3'),
      construct.ULInt64('domain_string_ptr'),
      construct.ULInt16('password_len'),
      construct.ULInt16('password_max_len'),
      construct.ULInt32('align4'),
      construct.ULInt64('password_encrypted_ptr'))
  
  def __init__(self, lsass_task, credentials_obj):
    Mimikatz_x64.__init__(self, lsass_task)
    Wdigest.__init__(self, credentials_obj)
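
As a sanity check on the layout above (assuming the construct 2.x declarations), the declared field widths add up to the size of a 64-bit wdigest list entry:

# Field-width tally for WDIGEST_LIST_ENTRY:
#   8 + 8                  previous, next (LIST_ENTRY links)
#   4 + 4                  usage_count, align1
#   8 + 8 + 8              this_entry, luid, flag
#   3 * (2 + 2 + 4 + 8)    (len, max_len, align, pointer) for user/domain/password
# = 96 bytes, so WDIGEST_LIST_ENTRY.sizeof() == 96.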
Example No. 13
    def _ReadAndParseHeader(self, file_object):
        """Reads the member header and sets relevant member values.

    Args:
      file_object (FileIO): file-like object to read from.

    Raises:
      FileFormatError: if file format related errors are detected.
    """
        member_header = self._MEMBER_HEADER_STRUCT.parse_stream(file_object)

        if member_header.signature != self._GZIP_SIGNATURE:
            raise errors.FileFormatError(
                'Unsupported file signature: 0x{0:04x}.'.format(
                    member_header.signature))

        if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
            raise errors.FileFormatError(
                'Unsupported compression method: {0:d}.'.format(
                    member_header.compression_method))

        self.modification_time = member_header.modification_time
        self.operating_system = member_header.operating_system

        if member_header.flags & self._FLAG_FEXTRA:
            extra_field_data_size = construct.ULInt16(
                'extra_field_data_size').parse_stream(file_object)
            file_object.seek(extra_field_data_size, os.SEEK_CUR)

        if member_header.flags & self._FLAG_FNAME:
            # Since encoding is set, construct will convert the C string to Unicode.
            # Note that construct 2 does not allow the encoding to be a Unicode
            # string.
            self.original_filename = construct.CString(
                'original_filename',
                encoding=b'iso-8859-1').parse_stream(file_object)

        if member_header.flags & self._FLAG_FCOMMENT:
            # Since encoding is set, construct will convert the C string to Unicode.
            # Note that construct 2 does not allow the encoding to be a Unicode
            # string.
            self.comment = construct.CString(
                'comment', encoding=b'iso-8859-1').parse_stream(file_object)

        if member_header.flags & self._FLAG_FHCRC:
            file_object.read(2)
Example No. 14
    def _parse(self):
        try:
            header = self._IPL_HEADER.parse(self._raw)
        except (construct.ConstructError, UnicodeDecodeError) as e:
            raise InvalidIPLError('Invalid IPL structure: {0}\n{1}'.format(
                e, hexdump(self._raw[:0x200])))

        try:
            # The IPL's code section is usually contained in the first 9 sectors. The remaining sectors are filled
            # with padding, but it appears that the last (15th) sector can sometimes hold data unrelated to the boot
            # process, which must be excluded from the hash calculation.
            invariantCode = self._raw[:14 * 512]
        except IndexError:
            raise InvalidIPLError(
                'Invalid sample size for IPL: {0} (should be 15 * 512-bytes sectors)'
                .format(len(self._raw)))

        expectedLoader = None

        # Starting with NT 6.2, the IPL has a localized string that must be excluded from hash computation.
        # The two kinds of IPL can be told apart by the instruction located at 0x56:
        # a Jump Short (EB) for IPL < 6.2 or a Jump Near (E9) otherwise.
        if header.signature == 'BOOTMGR' and self._raw[0x56].encode(
                'hex').upper() == 'E9':
            # The offset of the localized string seems to be stored in a DWORD at 0x117 (just before the beginning
            # of the assembly code). But the value seems to be an offset relative to the start of the whole
            # boot record (including the VBR) and not just the IPL.
            # Therefore we need to subtract 0x200 to get the offset inside the IPL.
            strOffset = construct.ULInt16('offset').parse(
                self._raw[0x117:]) - 0x200
            # Exclude from hash calculation everything between the string offset and the beginning of code
            invariantCode = invariantCode[:strOffset] + invariantCode[0x119:]
            expectedLoader = 'NT6.2+ IPL'

        codeHash = hashlib.sha256(invariantCode)
        self._matchHash(codeHash, expectedLoader)

        # If no whitelisted signature matched, try some simple heuristics to flag this IPL as malicious
        # Note that the self._checkCode method is only given the "stripped" code section to help the disassembling.
        # This will obviously lead to broken offsets, but it doesn't matter since the heuristics don't use them.
        if len(self._signature) == 0:
            self._checkCode(invariantCode)
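
The self._raw[0x56].encode('hex') comparison relies on Python 2 byte strings; a slice comparison is equivalent and also works on Python 3 bytes objects (a sketch, assuming raw holds the combined VBR+IPL bytes):

# Jump Near (E9) at 0x56 marks an NT 6.2+ IPL with a localized string to skip.
is_nt62_or_later = raw[0x56:0x57] == b'\xe9'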
Example No. 15
class UtmpxParser(interface.FileObjectParser):
    """Parser for UTMPX files."""

    NAME = 'utmpx'
    DESCRIPTION = 'Parser for UTMPX files.'

    # INFO: Type is supposed to be a short (2 bytes);
    # however, if we analyze the file it is always
    # a byte followed by 3 bytes with the value \x00.
    _UTMPX_ENTRY = construct.Struct('utmpx_mac', construct.String('user', 256),
                                    construct.ULInt32('id'),
                                    construct.String('tty_name', 32),
                                    construct.ULInt32('pid'),
                                    construct.ULInt16('status_type'),
                                    construct.ULInt16('unknown'),
                                    construct.ULInt32('timestamp'),
                                    construct.ULInt32('microseconds'),
                                    construct.String('hostname', 256),
                                    construct.Padding(64))

    _UTMPX_ENTRY_SIZE = _UTMPX_ENTRY.sizeof()

    _STATUS_TYPE_SIGNATURE = 10

    def _ReadEntry(self, parser_mediator, file_object):
        """Reads an UTMPX entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      bool: True if the UTMPX entry was successfully read.
    """
        data = file_object.read(self._UTMPX_ENTRY_SIZE)
        if len(data) != self._UTMPX_ENTRY_SIZE:
            return False

        try:
            entry_struct = self._UTMPX_ENTRY.parse(data)
        except (IOError, construct.FieldError) as exception:
            logging.warning(
                'Unable to parse MacOS UTMPX entry with error: {0!s}'.format(
                    exception))
            return False

        user, _, _ = entry_struct.user.partition(b'\x00')
        if not user:
            user = '******'

        terminal, _, _ = entry_struct.tty_name.partition(b'\x00')
        if not terminal:
            terminal = 'N/A'

        computer_name, _, _ = entry_struct.hostname.partition(b'\x00')
        if not computer_name:
            computer_name = 'localhost'

        event_data = UtmpxMacOSEventData()
        event_data.computer_name = computer_name
        event_data.offset = file_object.tell()
        event_data.status_type = entry_struct.status_type
        event_data.terminal = terminal
        event_data.user = user

        timestamp = (entry_struct.timestamp *
                     1000000) + entry_struct.microseconds
        date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
            timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_START)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        return True

    def _VerifyStructure(self, file_object):
        """Verify that we are dealing with an UTMPX entry.

    Args:
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      bool: True if it is a UTMPX entry or False otherwise.
    """
        # First entry is a SIGNAL entry of the file ("header").
        try:
            header_struct = self._UTMPX_ENTRY.parse_stream(file_object)
        except (IOError, construct.FieldError):
            return False
        user, _, _ = header_struct.user.partition(b'\x00')

        # The UTMPX_ENTRY structure will often successfully compile on various
        # structures, such as binary plist files, and thus we need to do some
        # additional validation. The first one is to check if the user name
        # can be converted into a Unicode string, otherwise we can assume
        # we are dealing with non UTMPX data.
        try:
            user.decode('utf-8')
        except UnicodeDecodeError:
            return False

        if user != b'utmpx-1.00':
            return False
        if header_struct.status_type != self._STATUS_TYPE_SIGNATURE:
            return False
        if (header_struct.timestamp != 0 or header_struct.microseconds != 0
                or header_struct.pid != 0):
            return False
        tty_name, _, _ = header_struct.tty_name.partition(b'\x00')
        hostname, _, _ = header_struct.hostname.partition(b'\x00')
        if tty_name or hostname:
            return False

        return True

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses an UTMPX file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        if not self._VerifyStructure(file_object):
            raise errors.UnableToParseFile('The file is not an UTMPX file.')

        while self._ReadEntry(parser_mediator, file_object):
            pass
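
For reference, the declared field widths above sum to the fixed on-disk record size the parser reads per entry:

# user 256 + id 4 + tty_name 32 + pid 4 + status_type 2 + unknown 2
# + timestamp 4 + microseconds 4 + hostname 256 + padding 64 = 628 bytes,
# so _UTMPX_ENTRY_SIZE == 628.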
Example No. 16
class SAMUsersWindowsRegistryPlugin(interface.WindowsRegistryPlugin):
  """Windows Registry plugin for SAM Users Account information."""

  NAME = u'windows_sam_users'
  DESCRIPTION = u'Parser for SAM Users and Names Registry keys.'

  FILTERS = frozenset([
      interface.WindowsRegistryKeyPathFilter(
          u'HKEY_LOCAL_MACHINE\\SAM\\Domains\\Account\\Users')])

  F_VALUE_STRUCT = construct.Struct(
      u'f_struct',
      construct.Padding(8),
      construct.ULInt64(u'last_login'),
      construct.Padding(8),
      construct.ULInt64(u'password_reset'),
      construct.Padding(16),
      construct.ULInt16(u'rid'),
      construct.Padding(16),
      construct.ULInt8(u'login_count'))

  V_VALUE_HEADER = construct.Struct(
      u'v_header',
      construct.Array(11, construct.ULInt32(u'values')))

  V_VALUE_HEADER_SIZE = 0xCC

  _SOURCE_APPEND = u'User Account Information'

  def _ParseFValue(self, key):
    """Parses F value and returns parsed F data construct object.

    Args:
      key: Registry key (instance of dfwinreg.WinRegistryKey).

    Returns:
      f_data: Construct parsed F value containing rid, login count,
              and timestamp information.
    """
    f_value = key.GetValueByName(u'F')
    if not f_value:
      logging.error(u'Unable to locate F Value in key.')
      return
    try:
      f_data = self.F_VALUE_STRUCT.parse(f_value.data)
    except construct.FieldError as exception:
      logging.error(
          u'Unable to extract F value data: {:s}'.format(exception))
      return
    return f_data

  def _ParseVValue(self, key):
    """Parses V value and returns name, fullname, and comments data.

    Args:
      key: Registry key (instance of dfwinreg.WinRegistryKey).

    Returns:
      name: Name data parsed with name start and length values.
      fullname: Fullname data parsed with fullname start and length values.
      comments: Comments data parsed with comments start and length values.
    """
    v_value = key.GetValueByName(u'V')
    if not v_value:
      logging.error(u'Unable to locate V Value in key.')
      return
    try:
      structure = self.V_VALUE_HEADER.parse(v_value.data)
    except construct.FieldError as exception:
      logging.error(
          u'Unable to extract V value header data with error: {0:s}'.format(
              exception))
      return
    name_offset = structure.values()[0][3] + self.V_VALUE_HEADER_SIZE
    full_name_offset = structure.values()[0][6] + self.V_VALUE_HEADER_SIZE
    comments_offset = structure.values()[0][9] + self.V_VALUE_HEADER_SIZE
    name_raw = v_value.data[
        name_offset:name_offset + structure.values()[0][4]]
    full_name_raw = v_value.data[
        full_name_offset:full_name_offset + structure.values()[0][7]]
    comments_raw = v_value.data[
        comments_offset:comments_offset + structure.values()[0][10]]
    name = binary.ReadUTF16(name_raw)
    full_name = binary.ReadUTF16(full_name_raw)
    comments = binary.ReadUTF16(comments_raw)
    return name, full_name, comments

  def GetEntries(self, parser_mediator, registry_key, **kwargs):
    """Collect data from Users and Names and produce event objects.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      registry_key: A Windows Registry key (instance of
                    dfwinreg.WinRegistryKey).
    """
    name_key = registry_key.GetSubkeyByName(u'Names')
    if not name_key:
      parser_mediator.ProduceParseError(u'Unable to locate Names key.')
      return
    values = [(v.name, v.last_written_time) for v in name_key.GetSubkeys()]

    name_dict = dict(values)

    for subkey in registry_key.GetSubkeys():
      if subkey.name == u'Names':
        continue

      parsed_v_value = self._ParseVValue(subkey)
      if not parsed_v_value:
        parser_mediator.ProduceParseError(
            u'Unable to parse SAM key: {0:s} V value.'.format(subkey))
        return

      username = parsed_v_value[0]
      full_name = parsed_v_value[1]
      comments = parsed_v_value[2]

      values_dict = {u'user_guid': subkey.name}

      if username:
        values_dict[u'username'] = username
      if full_name:
        values_dict[u'full_name'] = full_name
      if comments:
        values_dict[u'comments'] = comments
      if name_dict:
        account_create_time = name_dict.get(username, 0)
      else:
        account_create_time = 0

      f_data = self._ParseFValue(subkey)
      values_dict[u'account_rid'] = f_data.rid
      values_dict[u'login_count'] = f_data.login_count

      if account_create_time > 0:
        event_object = windows_events.WindowsRegistryEvent(
            account_create_time, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.ACCOUNT_CREATED,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)

      if f_data.last_login > 0:
        event_object = windows_events.WindowsRegistryEvent(
            f_data.last_login, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.LAST_LOGIN_TIME,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)

      if f_data.password_reset > 0:
        event_object = windows_events.WindowsRegistryEvent(
            f_data.password_reset, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.LAST_PASSWORD_RESET,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)
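
The offset arithmetic in _ParseVValue follows the layout of the SAM "V" value: a 0xCC-byte header of little-endian (offset, length, flags) triplets, with offsets relative to the end of that header. A hypothetical standalone helper that does the same slicing with the struct module (the triplet indices mirror the dword indices used above):

import struct

def read_v_value_field(v_data, triplet_index, header_size=0xCC):
    # triplet_index 1 = username, 2 = full name, 3 = comments.
    offset, length, _ = struct.unpack_from('<III', v_data, triplet_index * 12)
    return v_data[header_size + offset:header_size + offset + length]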
Example No. 17
class WinJobParser(interface.FileObjectParser):
    """Parse Windows Scheduled Task files for job events."""

    NAME = u'winjob'
    DESCRIPTION = u'Parser for Windows Scheduled Task job (or At-job) files.'

    _EMPTY_SYSTEM_TIME_TUPLE = (0, 0, 0, 0, 0, 0, 0, 0)

    _PRODUCT_VERSIONS = {
        0x0400: u'Windows NT 4.0',
        0x0500: u'Windows 2000',
        0x0501: u'Windows XP',
        0x0600: u'Windows Vista',
        0x0601: u'Windows 7',
        0x0602: u'Windows 8',
        0x0603: u'Windows 8.1',
        0x0a00: u'Windows 10',
    }

    _JOB_FIXED_LENGTH_SECTION_STRUCT = construct.Struct(
        u'job_fixed_length_section', construct.ULInt16(u'product_version'),
        construct.ULInt16(u'format_version'), construct.Bytes(u'job_uuid', 16),
        construct.ULInt16(u'application_length_offset'),
        construct.ULInt16(u'trigger_offset'),
        construct.ULInt16(u'error_retry_count'),
        construct.ULInt16(u'error_retry_interval'),
        construct.ULInt16(u'idle_deadline'), construct.ULInt16(u'idle_wait'),
        construct.ULInt32(u'priority'), construct.ULInt32(u'max_run_time'),
        construct.ULInt32(u'exit_code'), construct.ULInt32(u'status'),
        construct.ULInt32(u'flags'),
        construct.Struct(u'last_run_time', construct.ULInt16(u'year'),
                         construct.ULInt16(u'month'),
                         construct.ULInt16(u'weekday'),
                         construct.ULInt16(u'day'),
                         construct.ULInt16(u'hours'),
                         construct.ULInt16(u'minutes'),
                         construct.ULInt16(u'seconds'),
                         construct.ULInt16(u'milliseconds')))

    # Using Construct's utf-16 encoding here will create strings with their
    # null terminators exposed. Instead, we'll read these variables raw and
    # convert them using Plaso's ReadUTF16() for proper formatting.
    _JOB_VARIABLE_STRUCT = construct.Struct(
        u'job_variable_length_section',
        construct.ULInt16(u'running_instance_count'),
        construct.ULInt16(u'application_length'),
        construct.String(u'application',
                         lambda ctx: ctx.application_length * 2),
        construct.ULInt16(u'parameter_length'),
        construct.String(u'parameter', lambda ctx: ctx.parameter_length * 2),
        construct.ULInt16(u'working_directory_length'),
        construct.String(u'working_directory',
                         lambda ctx: ctx.working_directory_length * 2),
        construct.ULInt16(u'username_length'),
        construct.String(u'username', lambda ctx: ctx.username_length * 2),
        construct.ULInt16(u'comment_length'),
        construct.String(u'comment', lambda ctx: ctx.comment_length * 2),
        construct.ULInt16(u'userdata_length'),
        construct.String(u'userdata', lambda ctx: ctx.userdata_length),
        construct.ULInt16(u'reserved_length'),
        construct.String(u'reserved', lambda ctx: ctx.reserved_length),
        construct.ULInt16(u'number_of_triggers'))

    _TRIGGER_STRUCT = construct.Struct(u'trigger', construct.ULInt16(u'size'),
                                       construct.ULInt16(u'reserved1'),
                                       construct.ULInt16(u'start_year'),
                                       construct.ULInt16(u'start_month'),
                                       construct.ULInt16(u'start_day'),
                                       construct.ULInt16(u'end_year'),
                                       construct.ULInt16(u'end_month'),
                                       construct.ULInt16(u'end_day'),
                                       construct.ULInt16(u'start_hour'),
                                       construct.ULInt16(u'start_minute'),
                                       construct.ULInt32(u'duration'),
                                       construct.ULInt32(u'interval'),
                                       construct.ULInt32(u'trigger_flags'),
                                       construct.ULInt32(u'trigger_type'),
                                       construct.ULInt16(u'trigger_arg0'),
                                       construct.ULInt16(u'trigger_arg1'),
                                       construct.ULInt16(u'trigger_arg2'),
                                       construct.ULInt16(u'trigger_padding'),
                                       construct.ULInt16(u'trigger_reserved2'),
                                       construct.ULInt16(u'trigger_reserved3'))

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows job file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        try:
            header_struct = self._JOB_FIXED_LENGTH_SECTION_STRUCT.parse_stream(
                file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse fixed-length section with error: {0:s}'.
                format(exception))

        if header_struct.product_version not in self._PRODUCT_VERSIONS:
            raise errors.UnableToParseFile(
                u'Unsupported product version in: 0x{0:04x}'.format(
                    header_struct.product_version))

        if header_struct.format_version != 1:
            raise errors.UnableToParseFile(
                u'Unsupported format version in: {0:d}'.format(
                    header_struct.format_version))

        try:
            job_variable_struct = self._JOB_VARIABLE_STRUCT.parse_stream(
                file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse variable-length section with error: {0:s}'.
                format(exception))

        event_data = WinJobEventData()
        event_data.application = binary.ReadUTF16(
            job_variable_struct.application)
        event_data.comment = binary.ReadUTF16(job_variable_struct.comment)
        event_data.parameters = binary.ReadUTF16(job_variable_struct.parameter)
        event_data.username = binary.ReadUTF16(job_variable_struct.username)
        event_data.working_directory = binary.ReadUTF16(
            job_variable_struct.working_directory)

        systemtime_struct = header_struct.last_run_time
        system_time_tuple = (systemtime_struct.year, systemtime_struct.month,
                             systemtime_struct.weekday, systemtime_struct.day,
                             systemtime_struct.hours,
                             systemtime_struct.minutes,
                             systemtime_struct.seconds,
                             systemtime_struct.milliseconds)

        date_time = None
        if system_time_tuple != self._EMPTY_SYSTEM_TIME_TUPLE:
            try:
                date_time = dfdatetime_systemtime.Systemtime(
                    system_time_tuple=system_time_tuple)
            except ValueError:
                parser_mediator.ProduceExtractionError(
                    u'invalid last run time: {0!s}'.format(system_time_tuple))

        if date_time:
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        for index in range(job_variable_struct.number_of_triggers):
            try:
                trigger_struct = self._TRIGGER_STRUCT.parse_stream(file_object)
            except (IOError, construct.FieldError) as exception:
                parser_mediator.ProduceExtractionError(
                    u'unable to parse trigger: {0:d} with error: {1:s}'.format(
                        index, exception))
                return

            event_data.trigger_type = trigger_struct.trigger_type

            time_elements_tuple = (trigger_struct.start_year,
                                   trigger_struct.start_month,
                                   trigger_struct.start_day,
                                   trigger_struct.start_hour,
                                   trigger_struct.start_minute, 0)

            if time_elements_tuple != (0, 0, 0, 0, 0, 0):
                try:
                    date_time = dfdatetime_time_elements.TimeElements(
                        time_elements_tuple=time_elements_tuple)
                    date_time.is_local_time = True
                    date_time.precision = dfdatetime_definitions.PRECISION_1_MINUTE
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionError(
                        u'invalid trigger start time: {0!s}'.format(
                            time_elements_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time,
                        u'Scheduled to start',
                        time_zone=parser_mediator.timezone)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_elements_tuple = (trigger_struct.end_year,
                                   trigger_struct.end_month,
                                   trigger_struct.end_day, 0, 0, 0)

            if time_elements_tuple != (0, 0, 0, 0, 0, 0):
                try:
                    date_time = dfdatetime_time_elements.TimeElements(
                        time_elements_tuple=time_elements_tuple)
                    date_time.is_local_time = True
                    date_time.precision = dfdatetime_definitions.PRECISION_1_DAY
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionError(
                        u'invalid trigger end time: {0!s}'.format(
                            time_elements_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time,
                        u'Scheduled to end',
                        time_zone=parser_mediator.timezone)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
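
As a cross-check against the MS-TSCH .JOB format, the fixed-length section declared above is 68 bytes and each trigger record is 48 bytes:

# Fixed-length section: 2 + 2 + 16 (UUID) + 6 * 2 + 5 * 4 + 8 * 2 (SYSTEMTIME) = 68
# Trigger record:       10 * 2 + 4 * 4 + 6 * 2 = 48
# i.e. _JOB_FIXED_LENGTH_SECTION_STRUCT.sizeof() == 68 and
#      _TRIGGER_STRUCT.sizeof() == 48.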
Example No. 18
class CPIOArchiveFile(object):
    """CPIO archive file.

  Attributes:
    file_format (str): CPIO file format.
  """
    # pylint: disable=no-member

    _CPIO_SIGNATURE_BINARY_BIG_ENDIAN = b'\x71\xc7'
    _CPIO_SIGNATURE_BINARY_LITTLE_ENDIAN = b'\xc7\x71'
    _CPIO_SIGNATURE_PORTABLE_ASCII = b'070707'
    _CPIO_SIGNATURE_NEW_ASCII = b'070701'
    _CPIO_SIGNATURE_NEW_ASCII_WITH_CHECKSUM = b'070702'

    _CPIO_BINARY_BIG_ENDIAN_FILE_ENTRY_STRUCT = construct.Struct(
        'cpio_binary_big_endian_file_entry', construct.UBInt16('signature'),
        construct.UBInt16('device_number'), construct.UBInt16('inode_number'),
        construct.UBInt16('mode'), construct.UBInt16('user_identifier'),
        construct.UBInt16('group_identifier'),
        construct.UBInt16('number_of_links'),
        construct.UBInt16('special_device_number'),
        construct.UBInt16('modification_time_upper'),
        construct.UBInt16('modification_time_lower'),
        construct.UBInt16('path_string_size'),
        construct.UBInt16('file_size_upper'),
        construct.UBInt16('file_size_lower'))

    _CPIO_BINARY_LITTLE_ENDIAN_FILE_ENTRY_STRUCT = construct.Struct(
        'cpio_binary_little_endian_file_entry', construct.ULInt16('signature'),
        construct.ULInt16('device_number'), construct.ULInt16('inode_number'),
        construct.ULInt16('mode'), construct.ULInt16('user_identifier'),
        construct.ULInt16('group_identifier'),
        construct.ULInt16('number_of_links'),
        construct.ULInt16('special_device_number'),
        construct.ULInt16('modification_time_upper'),
        construct.ULInt16('modification_time_lower'),
        construct.ULInt16('path_string_size'),
        construct.ULInt16('file_size_upper'),
        construct.ULInt16('file_size_lower'))

    _CPIO_PORTABLE_ASCII_FILE_ENTRY_STRUCT = construct.Struct(
        'cpio_portable_ascii_file_entry', construct.Bytes('signature', 6),
        construct.Bytes('device_number',
                        6), construct.Bytes('inode_number', 6),
        construct.Bytes('mode', 6), construct.Bytes('user_identifier', 6),
        construct.Bytes('group_identifier', 6),
        construct.Bytes('number_of_links', 6),
        construct.Bytes('special_device_number', 6),
        construct.Bytes('modification_time', 11),
        construct.Bytes('path_string_size', 6),
        construct.Bytes('file_size', 11))

    _CPIO_NEW_ASCII_FILE_ENTRY_STRUCT = construct.Struct(
        'cpio_new_ascii_file_entry', construct.Bytes('signature', 6),
        construct.Bytes('inode_number', 8), construct.Bytes('mode', 8),
        construct.Bytes('user_identifier', 8),
        construct.Bytes('group_identifier', 8),
        construct.Bytes('number_of_links', 8),
        construct.Bytes('modification_time', 8),
        construct.Bytes('file_size', 8),
        construct.Bytes('device_major_number', 8),
        construct.Bytes('device_minor_number', 8),
        construct.Bytes('special_device_major_number', 8),
        construct.Bytes('special_device_minor_number', 8),
        construct.Bytes('path_string_size', 8), construct.Bytes('checksum', 8))

    def __init__(self):
        """Initializes the CPIO archive file object."""
        super(CPIOArchiveFile, self).__init__()
        self._file_entries = None
        self._file_object = None
        self._file_object_opened_in_object = False
        self._file_size = 0

        self.file_format = None

    def _ReadFileEntry(self, file_object, file_offset):
        """Reads a file entry.

    Args:
      file_object (FileIO): file-like object.
      file_offset (int): current file offset.

    Raises:
      IOError: if the file entry cannot be read.
    """
        file_object.seek(file_offset, os.SEEK_SET)

        if self.file_format == 'bin-big-endian':
            file_entry_struct = self._CPIO_BINARY_BIG_ENDIAN_FILE_ENTRY_STRUCT
        elif self.file_format == 'bin-little-endian':
            file_entry_struct = self._CPIO_BINARY_LITTLE_ENDIAN_FILE_ENTRY_STRUCT
        elif self.file_format == 'odc':
            file_entry_struct = self._CPIO_PORTABLE_ASCII_FILE_ENTRY_STRUCT
        elif self.file_format in ('crc', 'newc'):
            file_entry_struct = self._CPIO_NEW_ASCII_FILE_ENTRY_STRUCT

        file_entry_struct_size = file_entry_struct.sizeof()

        try:
            file_entry_struct = file_entry_struct.parse_stream(file_object)
        except construct.FieldError as exception:
            raise IOError(
                ('Unable to parse file entry data section with error: '
                 '{0:s}').format(exception))

        file_offset += file_entry_struct_size

        if self.file_format in ('bin-big-endian', 'bin-little-endian'):
            inode_number = file_entry_struct.inode_number
            mode = file_entry_struct.mode
            user_identifier = file_entry_struct.user_identifier
            group_identifier = file_entry_struct.group_identifier

            modification_time = (
                (file_entry_struct.modification_time_upper << 16)
                | file_entry_struct.modification_time_lower)

            path_string_size = file_entry_struct.path_string_size

            file_size = ((file_entry_struct.file_size_upper << 16)
                         | file_entry_struct.file_size_lower)

        elif self.file_format == 'odc':
            inode_number = int(file_entry_struct.inode_number, 8)
            mode = int(file_entry_struct.mode, 8)
            user_identifier = int(file_entry_struct.user_identifier, 8)
            group_identifier = int(file_entry_struct.group_identifier, 8)
            modification_time = int(file_entry_struct.modification_time, 8)
            path_string_size = int(file_entry_struct.path_string_size, 8)
            file_size = int(file_entry_struct.file_size, 8)

        elif self.file_format in ('crc', 'newc'):
            inode_number = int(file_entry_struct.inode_number, 16)
            mode = int(file_entry_struct.mode, 16)
            user_identifier = int(file_entry_struct.user_identifier, 16)
            group_identifier = int(file_entry_struct.group_identifier, 16)
            modification_time = int(file_entry_struct.modification_time, 16)
            path_string_size = int(file_entry_struct.path_string_size, 16)
            file_size = int(file_entry_struct.file_size, 16)

        path_string_data = file_object.read(path_string_size)
        file_offset += path_string_size

        # TODO: should this be ASCII?
        path_string = path_string_data.decode('ascii')
        path_string, _, _ = path_string.partition('\x00')

        if self.file_format in ('bin-big-endian', 'bin-little-endian'):
            padding_size = file_offset % 2
            if padding_size > 0:
                padding_size = 2 - padding_size

        elif self.file_format == 'odc':
            padding_size = 0

        elif self.file_format in ('crc', 'newc'):
            padding_size = file_offset % 4
            if padding_size > 0:
                padding_size = 4 - padding_size

        file_offset += padding_size

        file_entry = CPIOArchiveFileEntry()
        file_entry.data_offset = file_offset
        file_entry.data_size = file_size
        file_entry.group_identifier = group_identifier
        file_entry.inode_number = inode_number
        file_entry.modification_time = modification_time
        file_entry.path = path_string
        file_entry.mode = mode
        file_entry.size = (file_entry_struct_size + path_string_size +
                           padding_size + file_size)
        file_entry.user_identifier = user_identifier

        file_offset += file_size

        if self.file_format in ('bin-big-endian', 'bin-little-endian'):
            padding_size = file_offset % 2
            if padding_size > 0:
                padding_size = 2 - padding_size

        elif self.file_format == 'odc':
            padding_size = 0

        elif self.file_format in ('crc', 'newc'):
            padding_size = file_offset % 4
            if padding_size > 0:
                padding_size = 4 - padding_size

        if padding_size > 0:
            file_entry.size += padding_size

        return file_entry

    def _ReadFileEntries(self, file_object):
        """Reads the file entries from the cpio archive.

    Args:
      file_object (FileIO): file-like object.
    """
        self._file_entries = {}

        file_offset = 0
        while file_offset < self._file_size:
            file_entry = self._ReadFileEntry(file_object, file_offset)
            file_offset += file_entry.size
            if file_entry.path == 'TRAILER!!!':
                break

            if file_entry.path in self._file_entries:
                # TODO: alert on file entries with duplicate paths?
                continue

            self._file_entries[file_entry.path] = file_entry

    def Close(self):
        """Closes the CPIO archive file."""
        self._file_entries = None
        self._file_object = None
        self._file_size = None

    def FileEntryExistsByPath(self, path):
        """Determines if file entry for a specific path exists.

    Returns:
      bool: True if the file entry exists.
    """
        if self._file_entries is None:
            return False

        return path in self._file_entries

    def GetFileEntries(self, path_prefix=''):
        """Retrieves the file entries.

    Args:
      path_prefix (str): path prefix.

    Yields:
      CPIOArchiveFileEntry: a CPIO archive file entry.
    """
        if self._file_entries:
            for path, file_entry in iter(self._file_entries.items()):
                if path.startswith(path_prefix):
                    yield file_entry

    def GetFileEntryByPath(self, path):
        """Retrieves a file entry for a specific path.

    Returns:
      CPIOArchiveFileEntry: a CPIO archive file entry or None if not available.
    """
        if self._file_entries:
            return self._file_entries.get(path, None)

    def Open(self, file_object):
        """Opens the CPIO archive file.

    Args:
      file_object (FileIO): a file-like object.

    Raises:
      IOError: if the file format signature is not supported.
    """
        file_object.seek(0, os.SEEK_SET)
        signature_data = file_object.read(6)

        self.file_format = None
        if len(signature_data) > 2:
            if signature_data[:2] == self._CPIO_SIGNATURE_BINARY_BIG_ENDIAN:
                self.file_format = 'bin-big-endian'
            elif signature_data[:2] == self._CPIO_SIGNATURE_BINARY_LITTLE_ENDIAN:
                self.file_format = 'bin-little-endian'
            elif signature_data == self._CPIO_SIGNATURE_PORTABLE_ASCII:
                self.file_format = 'odc'
            elif signature_data == self._CPIO_SIGNATURE_NEW_ASCII:
                self.file_format = 'newc'
            elif signature_data == self._CPIO_SIGNATURE_NEW_ASCII_WITH_CHECKSUM:
                self.file_format = 'crc'

        if self.file_format is None:
            raise IOError('Unsupported CPIO format.')

        self._file_object = file_object
        self._file_size = file_object.get_size()

        self._ReadFileEntries(self._file_object)

    def ReadDataAtOffset(self, file_offset, size):
        """Reads a byte string from the file-like object at a specific offset.

    Args:
      file_offset (int): file offset.
      size (int): number of bytes to read.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
    """
        self._file_object.seek(file_offset, os.SEEK_SET)
        return self._file_object.read(size)
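
A minimal usage sketch of the class above (assuming file_object is a dfVFS-style file-like object that provides get_size(), as Open() requires):

cpio_archive = CPIOArchiveFile()
cpio_archive.Open(file_object)

print('format: {0:s}'.format(cpio_archive.file_format))
for file_entry in cpio_archive.GetFileEntries():
    data = cpio_archive.ReadDataAtOffset(file_entry.data_offset, file_entry.data_size)
    print('{0:s} ({1:d} bytes)'.format(file_entry.path, len(data)))

cpio_archive.Close()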
Example No. 19
class DataBlockFile(object):
  """Class that contains a data block file."""

  SIGNATURE = 0xc104cac3

  # TODO: update emtpy, hints, updating and user.
  _FILE_HEADER = construct.Struct(
      u'chrome_cache_data_file_header',
      construct.ULInt32(u'signature'),
      construct.ULInt16(u'minor_version'),
      construct.ULInt16(u'major_version'),
      construct.ULInt16(u'file_number'),
      construct.ULInt16(u'next_file_number'),
      construct.ULInt32(u'block_size'),
      construct.ULInt32(u'number_of_entries'),
      construct.ULInt32(u'maximum_number_of_entries'),
      construct.Array(4, construct.ULInt32(u'emtpy')),
      construct.Array(4, construct.ULInt32(u'hints')),
      construct.ULInt32(u'updating'),
      construct.Array(5, construct.ULInt32(u'user')),
      construct.Array(2028, construct.ULInt32(u'allocation_bitmap')))

  _CACHE_ENTRY = construct.Struct(
      u'chrome_cache_entry',
      construct.ULInt32(u'hash'),
      construct.ULInt32(u'next_address'),
      construct.ULInt32(u'rankings_node_address'),
      construct.ULInt32(u'reuse_count'),
      construct.ULInt32(u'refetch_count'),
      construct.ULInt32(u'state'),
      construct.ULInt64(u'creation_time'),
      construct.ULInt32(u'key_size'),
      construct.ULInt32(u'long_key_address'),
      construct.Array(4, construct.ULInt32(u'data_stream_sizes')),
      construct.Array(4, construct.ULInt32(u'data_stream_addresses')),
      construct.ULInt32(u'flags'),
      construct.Padding(16),
      construct.ULInt32(u'self_hash'),
      construct.Array(160, construct.UBInt8(u'key')))

  def __init__(self, debug=False):
    """Initializes the data block file object.

    Args:
      debug (Optional[bool]): True if debug information should be printed.
    """
    super(DataBlockFile, self).__init__()
    self._debug = debug
    self._file_object = None
    self._file_object_opened_in_object = False
    self.creation_time = None
    self.block_size = None
    self.number_of_entries = None
    self.version = None

  def _ReadFileHeader(self):
    """Reads the file header.

    Raises:
      IOError: if the file header cannot be read.
    """
    if self._debug:
      print(u'Seeking file header offset: 0x{0:08x}'.format(0))

    self._file_object.seek(0, os.SEEK_SET)

    file_header_data = self._file_object.read(self._FILE_HEADER.sizeof())

    if self._debug:
      print(u'Data block file header data:')
      print(hexdump.Hexdump(file_header_data))

    try:
      file_header = self._FILE_HEADER.parse(file_header_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse file header with error: {0:s}'.format(
          exception))

    signature = file_header.get(u'signature')

    if signature != self.SIGNATURE:
      raise IOError(u'Unsupported data block file signature')

    self.version = u'{0:d}.{1:d}'.format(
        file_header.get(u'major_version'),
        file_header.get(u'minor_version'))

    if self.version not in [u'2.0', u'2.1']:
      raise IOError(u'Unsupported data block file version: {0:s}'.format(
          self.version))

    self.block_size = file_header.get(u'block_size')
    self.number_of_entries = file_header.get(u'number_of_entries')

    if self._debug:
      print(u'Signature\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(signature))

      print(u'Version\t\t\t\t\t\t\t\t\t: {0:s}'.format(self.version))

      print(u'File number\t\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'file_number')))

      print(u'Next file number\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'next_file_number')))

      print(u'Block size\t\t\t\t\t\t\t\t: {0:d}'.format(self.block_size))

      print(u'Number of entries\t\t\t\t\t\t\t: {0:d}'.format(
          self.number_of_entries))

      print(u'Maximum number of entries\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'maximum_number_of_entries')))

      # TODO: print emtpy, hints, updating and user.

      block_number = 0
      block_range_start = 0
      block_range_end = 0
      in_block_range = False
      for value_32bit in file_header.get(u'allocation_bitmap'):
        for unused_bit in range(0, 32):
          if value_32bit & 0x00000001:
            if not in_block_range:
              block_range_start = block_number
              block_range_end = block_number
              in_block_range = True

            block_range_end += 1

          elif in_block_range:
            in_block_range = False

            if self._debug:
              print(u'Block range\t: {0:d} - {1:d} ({2:d})'.format(
                  block_range_start, block_range_end,
                  block_range_end - block_range_start))

          value_32bit >>= 1
          block_number += 1

      print(u'')

  def ReadCacheEntry(self, block_offset):
    """Reads a cache entry.

    Args:
      block_offset (int): offset of the block that contains the cache entry.
    ""
    if self._debug:
      print(u'Seeking cache entry offset: 0x{0:08x}'.format(block_offset))

    self._file_object.seek(block_offset, os.SEEK_SET)

    cache_entry_data = self._file_object.read(self._CACHE_ENTRY.sizeof())

    if self._debug:
      print(u'Data block file cache entry data:')
      print(hexdump.Hexdump(cache_entry_data))

    try:
      cache_entry_struct = self._CACHE_ENTRY.parse(cache_entry_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse cache entry with error: {0:s}'.format(
          exception))

    cache_entry = CacheEntry()

    cache_entry.hash = cache_entry_struct.get(u'hash')

    cache_entry.next = CacheAddress(cache_entry_struct.get(u'next_address'))
    cache_entry.rankings_node = CacheAddress(cache_entry_struct.get(
        u'rankings_node_address'))

    cache_entry.creation_time = cache_entry_struct.get(u'creation_time')

    byte_array = cache_entry_struct.get(u'key')
    byte_string = b''.join(map(chr, byte_array))
    cache_entry.key, _, _ = byte_string.partition(b'\x00')

    if self._debug:
      print(u'Hash\t\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(cache_entry.hash))

      print(u'Next address\t\t\t\t\t\t\t\t: {0:s}'.format(
          cache_entry.next.GetDebugString()))

      print(u'Rankings node address\t\t\t\t\t\t\t: {0:s}'.format(
          cache_entry.rankings_node.GetDebugString()))

      print(u'Reuse count\t\t\t\t\t\t\t\t: {0:d}'.format(
          cache_entry_struct.get(u'reuse_count')))

      print(u'Refetch count\t\t\t\t\t\t\t\t: {0:d}'.format(
          cache_entry_struct.get(u'refetch_count')))

      print(u'State\t\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cache_entry_struct.get(u'state')))

      date_string = (datetime.datetime(1601, 1, 1) +
                     datetime.timedelta(microseconds=cache_entry.creation_time))

      print(u'Creation time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, cache_entry.creation_time))

      for value in cache_entry_struct.get(u'data_stream_sizes'):
        print(u'Data stream size\t\t\t\t\t\t\t: {0:d}'.format(value))

      cache_address_index = 0
      for value in cache_entry_struct.get(u'data_stream_addresses'):
        cache_address = CacheAddress(value)
        print(u'Data stream address: {0:d}\t\t\t\t\t\t\t: {1:s}'.format(
            cache_address_index, cache_address.GetDebugString()))
        cache_address_index += 1

      print(u'Flags\t\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cache_entry_struct.get(u'flags')))

      print(u'Self hash\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          cache_entry_struct.get(u'self_hash')))

      try:
        cache_entry_key = cache_entry.key.decode(u'ascii')
      except UnicodeDecodeError:
        logging.warning((
            u'Unable to decode cache entry key at cache address: '
            u'0x{0:08x}. Characters that cannot be decoded will be '
            u'replaced with "?" or "\\ufffd".').format(cache_address.value))
        cache_entry_key = cache_entry.key.decode(u'ascii', errors=u'replace')

      print(u'Key\t\t\t\t\t\t\t\t\t: {0:s}'.format(cache_entry_key))

      # TODO: calculate and verify hash.

      print(u'')

    return cache_entry

  def Close(self):
    """Closes the data block file."""
    if self._file_object_opened_in_object:
      self._file_object.close()
    self._file_object = None

  def Open(self, filename):
    """Opens the data block file.

    Args:
      filename (str): path of the file.
    """
    self._file_object = open(filename, 'rb')
    self._file_object_opened_in_object = True
    self._ReadFileHeader()

  def OpenFileObject(self, file_object):
    """Opens the data block file.

    Args:
      file_object (file): file-like object.
    """
    self._file_object = file_object
    self._file_object_opened_in_object = False
    self._ReadFileHeader()
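The key handling in ReadCacheEntry above joins the construct byte array back into a byte string and truncates it at the first NUL. A minimal Python 2 sketch of just that step, using an invented key array:

# Invented 'key' array, as construct returns it for an Array of bytes.
byte_array = [104, 116, 116, 112, 58, 47, 47, 0, 0, 0]

byte_string = b''.join(map(chr, byte_array))   # back to a byte string
key, _, _ = byte_string.partition(b'\x00')     # cut at the first NUL
print(key)  # http://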
Exemplo n.º 20
0
class IndexFile(object):
  """Class that contains an index file."""

  SIGNATURE = 0xc103cac3

  _FILE_HEADER = construct.Struct(
      u'chrome_cache_index_file_header',
      construct.ULInt32(u'signature'),
      construct.ULInt16(u'minor_version'),
      construct.ULInt16(u'major_version'),
      construct.ULInt32(u'number_of_entries'),
      construct.ULInt32(u'stored_data_size'),
      construct.ULInt32(u'last_created_file_number'),
      construct.ULInt32(u'unknown1'),
      construct.ULInt32(u'unknown2'),
      construct.ULInt32(u'table_size'),
      construct.ULInt32(u'unknown3'),
      construct.ULInt32(u'unknown4'),
      construct.ULInt64(u'creation_time'),
      construct.Padding(208))

  _LRU_DATA = construct.Struct(
      u'chrome_cache_index_file_lru_data',
      construct.Padding(8),
      construct.ULInt32(u'filled_flag'),
      construct.Array(5, construct.ULInt32(u'sizes')),
      construct.Array(5, construct.ULInt32(u'head_addresses')),
      construct.Array(5, construct.ULInt32(u'tail_addresses')),
      construct.ULInt32(u'transaction_address'),
      construct.ULInt32(u'operation'),
      construct.ULInt32(u'operation_list'),
      construct.Padding(28))

  def __init__(self, debug=False):
    """Initializes the index file object.

    Args:
      debug (Optional[bool]): True if debug information should be printed.
    """
    super(IndexFile, self).__init__()
    self._debug = debug
    self._file_object = None
    self._file_object_opened_in_object = False
    self.creation_time = None
    self.version = None
    self.index_table = {}

  def _ReadFileHeader(self):
    """Reads the file header.

    Raises:
      IOError: if the file header cannot be read.
    """
    if self._debug:
      print(u'Seeking file header offset: 0x{0:08x}'.format(0))

    self._file_object.seek(0, os.SEEK_SET)

    file_header_data = self._file_object.read(self._FILE_HEADER.sizeof())

    if self._debug:
      print(u'Index file header data:')
      print(hexdump.Hexdump(file_header_data))

    try:
      file_header = self._FILE_HEADER.parse(file_header_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse file header with error: {0:s}'.format(
          exception))

    signature = file_header.get(u'signature')

    if signature != self.SIGNATURE:
      raise IOError(u'Unsupported index file signature')

    self.version = u'{0:d}.{1:d}'.format(
        file_header.get(u'major_version'),
        file_header.get(u'minor_version'))

    if self.version not in [u'2.0', u'2.1']:
      raise IOError(u'Unsupported index file version: {0:s}'.format(
          self.version))

    self.creation_time = file_header.get(u'creation_time')

    if self._debug:
      print(u'Signature\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(signature))

      print(u'Version\t\t\t\t\t\t\t\t\t: {0:s}'.format(self.version))

      print(u'Number of entries\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'number_of_entries')))

      print(u'Stored data size\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'stored_data_size')))

      print(u'Last created file number\t\t\t\t\t\t: f_{0:06x}'.format(
          file_header.get(u'last_created_file_number')))

      print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown1')))

      print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown2')))

      print(u'Table size\t\t\t\t\t\t\t\t: {0:d}'.format(
          file_header.get(u'table_size')))

      print(u'Unknown3\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown3')))

      print(u'Unknown4\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          file_header.get(u'unknown4')))

      date_string = (
          datetime.datetime(1601, 1, 1) +
          datetime.timedelta(microseconds=self.creation_time))

      print(u'Creation time\t\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.format(
          date_string, self.creation_time))

      print(u'')

  def _ReadLruData(self):
    """Reads the LRU data."""
    lru_data = self._file_object.read(self._LRU_DATA.sizeof())

    if self._debug:
      print(u'Index file LRU data:')
      print(hexdump.Hexdump(lru_data))

    try:
      index_file_lru = self._LRU_DATA.parse(lru_data)
    except construct.FieldError as exception:
      raise IOError(u'Unable to parse LRU data with error: {0:s}'.format(
          exception))

    if self._debug:
      print(u'Filled flag\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          index_file_lru.get(u'filled_flag')))

      for value in index_file_lru.get(u'sizes'):
        print(u'Size\t\t\t\t\t\t\t\t\t: {0:d}'.format(value))

      cache_address_index = 0
      for value in index_file_lru.get(u'head_addresses'):
        cache_address = CacheAddress(value)
        print(u'Head address: {0:d}\t\t\t\t\t\t\t\t: {1:s}'.format(
            cache_address_index, cache_address.GetDebugString()))
        cache_address_index += 1

      cache_address_index = 0
      for value in index_file_lru.get(u'tail_addresses'):
        cache_address = CacheAddress(value)
        print(u'Tail address: {0:d}\t\t\t\t\t\t\t\t: {1:s}'.format(
            cache_address_index, cache_address.GetDebugString()))
        cache_address_index += 1

      cache_address = CacheAddress(index_file_lru.get(u'transaction_address'))
      print(u'Transaction address\t\t\t\t\t\t\t: {0:s}'.format(
          cache_address.GetDebugString()))

      print(u'Operation\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          index_file_lru.get(u'operation')))

      print(u'Operation list\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
          index_file_lru.get(u'operation_list')))

      print(u'')

  def _ReadIndexTable(self):
    """Reads the index table."""
    cache_address_index = 0
    cache_address_data = self._file_object.read(4)

    while len(cache_address_data) == 4:
      value = construct.ULInt32(u'cache_address').parse(cache_address_data)

      if value:
        cache_address = CacheAddress(value)

        if self._debug:
          print(u'Cache address: {0:d}\t\t\t\t\t\t\t: {1:s}'.format(
              cache_address_index, cache_address.GetDebugString()))

        self.index_table[cache_address_index] = cache_address

      cache_address_index += 1
      cache_address_data = self._file_object.read(4)

    if self._debug:
      print(u'')

  def Close(self):
    """Closes the index file."""
    if self._file_object_opened_in_object:
      self._file_object.close()
    self._file_object = None

  def Open(self, filename):
    """Opens the index file.

    Args:
      filename (str): path of the file.
    """
    self._file_object = open(filename, 'rb')
    self._file_object_opened_in_object = True
    self._ReadFileHeader()
    self._ReadLruData()
    self._ReadIndexTable()

  def OpenFileObject(self, file_object):
    """Opens the index file-like object.

    Args:
      file_object (file): file-like object.
    """
    self._file_object = file_object
    self._file_object_opened_in_object = False
    self._ReadFileHeader()
    self._ReadLruData()
    self._ReadIndexTable()
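The version handling in _ReadFileHeader can be exercised without a real Chrome cache index file. A minimal sketch, assuming the legacy construct 2.5 API used throughout these examples; the header values are synthetic:

import construct

# Leading fields of _FILE_HEADER above: signature, minor and major version.
TEST_HEADER = construct.Struct(
    u'test_index_header',
    construct.ULInt32(u'signature'),
    construct.ULInt16(u'minor_version'),
    construct.ULInt16(u'major_version'))

test_data = TEST_HEADER.build(construct.Container(
    signature=0xc103cac3, minor_version=0, major_version=2))

file_header = TEST_HEADER.parse(test_data)
assert file_header.signature == 0xc103cac3

version = u'{0:d}.{1:d}'.format(
    file_header.major_version, file_header.minor_version)
print(version)  # 2.0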
Exemplo n.º 21
0
class NTFSUsnJrnlParser(interface.FileObjectParser):
    """Parses a NTFS USN change journal."""

    _INITIAL_FILE_OFFSET = None

    NAME = u'usnjrnl'
    DESCRIPTION = u'Parser for NTFS USN change journal ($UsnJrnl).'

    _USN_RECORD_V2 = construct.Struct(
        u'usn_record_v2', construct.ULInt32(u'size'),
        construct.ULInt16(u'major_version'),
        construct.ULInt16(u'minor_version'),
        construct.ULInt64(u'file_reference'),
        construct.ULInt64(u'parent_file_reference'),
        construct.ULInt64(u'update_sequence_number'),
        construct.ULInt64(u'update_date_time'),
        construct.ULInt32(u'update_reason_flags'),
        construct.ULInt32(u'update_source_flags'),
        construct.ULInt32(u'security_descriptor_identifier'),
        construct.ULInt32(u'file_attribute_flags'),
        construct.ULInt16(u'name_size'), construct.ULInt16(u'name_offset'),
        construct.String(u'name', lambda ctx: ctx.size - 60))

    # TODO: add support for USN_RECORD_V3 when actually seen to be used.

    def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
        """Parses an USN change journal.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      usn_change_journal (pyfsntfs.usn_change_journal): USN change journal.
    """
        if not usn_change_journal:
            return

        usn_record_data = usn_change_journal.read_usn_record()
        while usn_record_data:
            current_offset = usn_change_journal.get_offset()

            try:
                usn_record_struct = self._USN_RECORD_V2.parse(usn_record_data)
            except (IOError, construct.FieldError) as exception:
                parser_mediator.ProduceExtractionError(
                    (u'unable to parse USN record at offset: 0x{0:08x} '
                     u'with error: {1:s}').format(current_offset, exception))
                # Read the next record so the loop does not retry the same
                # data indefinitely.
                usn_record_data = usn_change_journal.read_usn_record()
                continue

            name_offset = usn_record_struct.name_offset - 60
            utf16_stream = usn_record_struct.name[
                name_offset:usn_record_struct.name_size]

            try:
                name_string = utf16_stream.decode(u'utf-16-le')
            except (UnicodeDecodeError, UnicodeEncodeError) as exception:
                name_string = utf16_stream.decode(u'utf-16-le',
                                                  errors=u'replace')
                parser_mediator.ProduceExtractionError((
                    u'unable to decode USN record name string with error: '
                    u'{0:s}. Characters that cannot be decoded will be replaced '
                    u'with "?" or "\\ufffd".').format(exception))

            event_data = NTFSUSNChangeEventData()
            event_data.file_attribute_flags = usn_record_struct.file_attribute_flags
            event_data.file_reference = usn_record_struct.file_reference
            event_data.filename = name_string
            event_data.offset = current_offset
            event_data.parent_file_reference = usn_record_struct.parent_file_reference
            event_data.update_reason_flags = usn_record_struct.update_reason_flags
            event_data.update_sequence_number = (
                usn_record_struct.update_sequence_number)
            event_data.update_source_flags = usn_record_struct.update_source_flags

            if not usn_record_struct.update_date_time:
                date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=usn_record_struct.update_date_time)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            usn_record_data = usn_change_journal.read_usn_record()

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a NTFS $UsnJrnl metadata file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
        volume = pyfsntfs.volume()
        try:
            volume.open_file_object(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to open NTFS volume with error: {0:s}'.format(
                    exception))

        try:
            usn_change_journal = volume.get_usn_change_journal()
            self._ParseUSNChangeJournal(parser_mediator, usn_change_journal)
        finally:
            volume.close()
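The name handling above relies on the 'name' field capturing everything after the fixed 60-byte part of the record, so the UTF-16-LE name starts at name_offset - 60. A sketch that builds a synthetic version 2 record and parses it the same way; the layout repeats _USN_RECORD_V2 and all field values are invented:

import struct

import construct

_USN_RECORD_V2 = construct.Struct(
    u'usn_record_v2',
    construct.ULInt32(u'size'),
    construct.ULInt16(u'major_version'),
    construct.ULInt16(u'minor_version'),
    construct.ULInt64(u'file_reference'),
    construct.ULInt64(u'parent_file_reference'),
    construct.ULInt64(u'update_sequence_number'),
    construct.ULInt64(u'update_date_time'),
    construct.ULInt32(u'update_reason_flags'),
    construct.ULInt32(u'update_source_flags'),
    construct.ULInt32(u'security_descriptor_identifier'),
    construct.ULInt32(u'file_attribute_flags'),
    construct.ULInt16(u'name_size'),
    construct.ULInt16(u'name_offset'),
    construct.String(u'name', lambda ctx: ctx.size - 60))

name = u'test.txt'.encode(u'utf-16-le')
record_size = 60 + len(name)

# Fixed 60-byte header (all values invented), then the UTF-16-LE name.
usn_record_data = struct.pack(
    '<IHHQQQQIIIIHH', record_size, 2, 0, 1, 5, 10, 0, 0x1, 0x2, 0, 0x20,
    len(name), 60) + name

usn_record_struct = _USN_RECORD_V2.parse(usn_record_data)
name_offset = usn_record_struct.name_offset - 60
utf16_stream = usn_record_struct.name[name_offset:usn_record_struct.name_size]
print(utf16_stream.decode(u'utf-16-le'))  # test.txt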
Exemplo n.º 22
0
class UtmpParser(interface.SingleFileBaseParser):
    """Parser for Linux/Unix UTMP files."""

    _INITIAL_FILE_OFFSET = None

    NAME = 'utmp'
    DESCRIPTION = u'Parser for Linux/Unix UTMP files.'

    LINUX_UTMP_ENTRY = construct.Struct('utmp_linux',
                                        construct.ULInt32('type'),
                                        construct.ULInt32('pid'),
                                        construct.String('terminal', 32),
                                        construct.ULInt32('terminal_id'),
                                        construct.String('username', 32),
                                        construct.String('hostname', 256),
                                        construct.ULInt16('termination'),
                                        construct.ULInt16('exit'),
                                        construct.ULInt32('session'),
                                        construct.ULInt32('timestamp'),
                                        construct.ULInt32('microsecond'),
                                        construct.ULInt32('address_a'),
                                        construct.ULInt32('address_b'),
                                        construct.ULInt32('address_c'),
                                        construct.ULInt32('address_d'),
                                        construct.Padding(20))

    LINUX_UTMP_ENTRY_SIZE = LINUX_UTMP_ENTRY.sizeof()

    STATUS_TYPE = {
        0: 'EMPTY',
        1: 'RUN_LVL',
        2: 'BOOT_TIME',
        3: 'NEW_TIME',
        4: 'OLD_TIME',
        5: 'INIT_PROCESS',
        6: 'LOGIN_PROCESS',
        7: 'USER_PROCESS',
        8: 'DEAD_PROCESS',
        9: 'ACCOUNTING'
    }

    # Set a default test value for a few fields; this is supposed to be text
    # that is highly unlikely to be seen in a terminal field or a username
    # field. It is important that this value does not show up in such fields,
    # but otherwise it can be any free flowing text. A short verification
    # sketch follows this example.
    _DEFAULT_TEST_VALUE = u'Ekki Fraedilegur Moguleiki, thetta er bull ! = + _<>'

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses an UTMP file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: The file-like object to extract data from.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        file_object.seek(0, os.SEEK_SET)
        try:
            structure = self.LINUX_UTMP_ENTRY.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse UTMP Header with error: {0:s}'.format(
                    exception))

        if structure.type not in self.STATUS_TYPE:
            raise errors.UnableToParseFile(
                (u'Not an UTMP file, unknown type '
                 u'[{0:d}].').format(structure.type))

        if not self._VerifyTextField(structure.terminal):
            raise errors.UnableToParseFile(
                u'Not an UTMP file, unknown terminal.')

        if not self._VerifyTextField(structure.username):
            raise errors.UnableToParseFile(
                u'Not an UTMP file, unknown username.')

        if not self._VerifyTextField(structure.hostname):
            raise errors.UnableToParseFile(
                u'Not an UTMP file, unknown hostname.')

        # Check few values.
        terminal = self._GetTextFromNullTerminatedString(
            structure.terminal, self._DEFAULT_TEST_VALUE)
        if terminal == self._DEFAULT_TEST_VALUE:
            raise errors.UnableToParseFile(
                u'Not an UTMP file, no terminal set.')

        username = self._GetTextFromNullTerminatedString(
            structure.username, self._DEFAULT_TEST_VALUE)

        if username == self._DEFAULT_TEST_VALUE:
            raise errors.UnableToParseFile(
                u'Not an UTMP file, no username set.')

        if not structure.timestamp:
            raise errors.UnableToParseFile(
                u'Not an UTMP file, no timestamp set in the first record.')

        file_object.seek(0, os.SEEK_SET)
        event_object = self._ReadUtmpEvent(file_object)
        while event_object:
            event_object.offset = file_object.tell()
            parser_mediator.ProduceEvent(event_object)
            event_object = self._ReadUtmpEvent(file_object)

    def _VerifyTextField(self, text):
        """Check if a byte stream is a null terminated string.

    Args:
      event_object: text field from the structure.

    Return:
      True if it is a null terminated string, False otherwise.
    """
        _, _, null_chars = text.partition(b'\x00')
        if not null_chars:
            return False
        return len(null_chars) == null_chars.count(b'\x00')

    def _ReadUtmpEvent(self, file_object):
        """Returns an UtmpEvent from a single UTMP entry.

    Args:
      file_object: a file-like object that points to an UTMP file.

    Returns:
      An event object constructed from a single UTMP record or None if we
      have reached the end of the file (or EOF).
    """
        offset = file_object.tell()
        data = file_object.read(self.LINUX_UTMP_ENTRY_SIZE)
        if not data or len(data) != self.LINUX_UTMP_ENTRY_SIZE:
            return
        try:
            entry = self.LINUX_UTMP_ENTRY.parse(data)
        except (IOError, construct.FieldError):
            logging.warning(
                (u'UTMP entry at 0x{:x} couldn\'t be parsed.').format(offset))
            return self._ReadUtmpEvent(file_object)

        user = self._GetTextFromNullTerminatedString(entry.username)
        terminal = self._GetTextFromNullTerminatedString(entry.terminal)
        if terminal == '~':
            terminal = u'system boot'
        computer_name = self._GetTextFromNullTerminatedString(entry.hostname)
        if computer_name == u'N/A' or computer_name == u':0':
            computer_name = u'localhost'
        status = self.STATUS_TYPE.get(entry.type, u'N/A')

        if not entry.address_b:
            try:
                ip_address = socket.inet_ntoa(
                    construct.ULInt32('int').build(entry.address_a))
                if ip_address == '0.0.0.0':
                    ip_address = u'localhost'
            except (IOError, construct.FieldError, socket.error):
                ip_address = u'N/A'
        else:
            ip_address = u'{0:d}.{1:d}.{2:d}.{3:d}'.format(
                entry.address_a, entry.address_b, entry.address_c,
                entry.address_d)

        return UtmpEvent(entry.timestamp, entry.microsecond, user,
                         computer_name, terminal, status, ip_address, entry)

    def _GetTextFromNullTerminatedString(self,
                                         null_terminated_string,
                                         default_string=u'N/A'):
        """Get a UTF-8 text from a raw null terminated string.

    Args:
      null_terminated_string: Raw string terminated with null character.
      default_string: The default string returned if the parser fails.

    Returns:
      A decoded UTF-8 string or if unable to decode, the supplied default
      string.
    """
        text, _, _ = null_terminated_string.partition('\x00')
        try:
            text = text.decode('utf-8')
        except UnicodeDecodeError:
            logging.warning(
                u'[UTMP] Decode UTF8 failed, the message string may be cut short.'
            )
            text = text.decode('utf-8', 'ignore')
        if not text:
            return default_string
        return text
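A small sketch of the sentinel check referenced in the comment near the top of this example: the sentinel is passed as the default, so an empty terminal or username field comes back as the sentinel and the file is rejected. The helper mirrors _GetTextFromNullTerminatedString; the field contents are invented:

SENTINEL = u'Ekki Fraedilegur Moguleiki, thetta er bull ! = + _<>'


def get_text(null_terminated_string, default_string=u'N/A'):
    # Keep everything up to the first NUL byte; fall back to the default
    # when the field is empty (mirrors _GetTextFromNullTerminatedString).
    text, _, _ = null_terminated_string.partition(b'\x00')
    if not text:
        return default_string
    return text.decode(u'utf-8', u'ignore')


print(get_text(b'pts/0\x00\x00\x00', SENTINEL) == SENTINEL)  # False: keep file
print(get_text(b'\x00' * 32, SENTINEL) == SENTINEL)          # True: reject file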
Exemplo n.º 23
0
class NTFSUsnJrnlParser(interface.FileObjectParser):
  """Parses a NTFS USN change journal."""

  _INITIAL_FILE_OFFSET = None

  NAME = u'usnjrnl'
  DESCRIPTION = u'Parser for NTFS USN change journal ($UsnJrnl).'

  _USN_RECORD_V2 = construct.Struct(
      u'usn_record_v2',
      construct.ULInt32(u'size'),
      construct.ULInt16(u'major_version'),
      construct.ULInt16(u'minor_version'),
      construct.ULInt64(u'file_reference'),
      construct.ULInt64(u'parent_file_reference'),
      construct.ULInt64(u'update_sequence_number'),
      construct.ULInt64(u'update_date_time'),
      construct.ULInt32(u'update_reason_flags'),
      construct.ULInt32(u'update_source_flags'),
      construct.ULInt32(u'security_descriptor_identifier'),
      construct.ULInt32(u'file_attribute_flags'),
      construct.ULInt16(u'name_size'),
      construct.ULInt16(u'name_offset'),
      construct.String(u'name', lambda ctx: ctx.size - 60))

  # TODO: add support for USN_RECORD_V3 when actually seen to be used.

  def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
    """Parses an USN change journal.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      usn_change_journal: An USN change journal object (instance of
                          pyfsntfs.usn_change_journal).
    """
    if not usn_change_journal:
      return

    usn_record_data = usn_change_journal.read_usn_record()
    while usn_record_data:
      current_offset = usn_change_journal.get_offset()

      try:
        usn_record_struct = self._USN_RECORD_V2.parse(usn_record_data)
      except (IOError, construct.FieldError) as exception:
        parser_mediator.ProduceParseError((
            u'unable to parse USN record at offset: 0x{0:08x} '
            u'with error: {1:s}').format(current_offset, exception))
        # Read the next record so the loop does not retry the same data
        # indefinitely.
        usn_record_data = usn_change_journal.read_usn_record()
        continue

      name_offset = usn_record_struct.name_offset - 60
      utf16_stream = usn_record_struct.name[
          name_offset:usn_record_struct.name_size]

      try:
        name_string = utf16_stream.decode(u'utf-16-le')
      except (UnicodeDecodeError, UnicodeEncodeError) as exception:
        name_string = utf16_stream.decode(u'utf-16-le', errors=u'replace')
        parser_mediator.ProduceParseError((
            u'unable to decode USN record name string with error: '
            u'{0:s}. Characters that cannot be decoded will be replaced '
            u'with "?" or "\\ufffd".').format(exception))

      event_object = file_system_events.NTFSUSNChangeEvent(
          usn_record_struct.update_date_time, current_offset,
          name_string, usn_record_struct.file_reference,
          usn_record_struct.update_sequence_number,
          usn_record_struct.update_source_flags,
          usn_record_struct.update_reason_flags,
          file_attribute_flags=usn_record_struct.file_attribute_flags,
          parent_file_reference=usn_record_struct.parent_file_reference)
      parser_mediator.ProduceEvent(event_object)

      usn_record_data = usn_change_journal.read_usn_record()

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a NTFS $UsnJrnl metadata file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.
    """
    volume = pyfsntfs.volume()
    try:
      volume.open_file_object(file_object)
    except IOError as exception:
      parser_mediator.ProduceParseError(
          u'unable to open NTFS volume with error: {0:s}'.format(exception))

    try:
      usn_change_journal = volume.get_usn_change_journal()
      self._ParseUSNChangeJournal(parser_mediator, usn_change_journal)
    finally:
      volume.close()
Exemplo n.º 24
0
    def DetermineCacheEntrySize(self, format_type, value_data,
                                cached_entry_offset):
        """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the first cached entry data relative to the start of
                           the value data.

    Returns:
      The cached entry size if successful or None otherwise.

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[cached_entry_offset:]
        cached_entry_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_size = self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof()

        elif format_type in [
                self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                self.FORMAT_TYPE_7
        ]:
            path_size = construct.ULInt16('path_size').parse(
                cached_entry_data[0:2])
            maximum_path_size = construct.ULInt16('maximum_path_size').parse(
                cached_entry_data[2:4])
            path_offset_32bit = construct.ULInt32('path_offset').parse(
                cached_entry_data[4:8])
            path_offset_64bit = construct.ULInt64('path_offset').parse(
                cached_entry_data[8:16])

            if maximum_path_size < path_size:
                logging.error(u'Path size value out of bounds.')
                return

            path_end_of_string_size = maximum_path_size - path_size
            if path_size == 0 or path_end_of_string_size != 2:
                logging.error(u'Unsupported path size values.')
                return

            # Assume the entry is 64-bit if the 32-bit path offset is 0 and
            # the 64-bit path offset is set.
            if path_offset_32bit == 0 and path_offset_64bit != 0:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
                    )

            else:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof(
                    )

        elif format_type == self.FORMAT_TYPE_8:
            cached_entry_size = self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof()

        return cached_entry_size
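The 32-bit versus 64-bit decision above only needs the first 16 bytes of a cached entry: if the 32-bit path offset slot is zero while the 64-bit slot is not, the entry is assumed to use the 64-bit layout. A sketch with invented entry data, assuming the legacy construct 2.5 API:

import struct

import construct


def looks_64bit(cached_entry_data):
    # path_size and maximum_path_size occupy bytes 0-3; the next bytes hold
    # either a 32-bit path offset or unknown1 plus a 64-bit path offset.
    path_offset_32bit = construct.ULInt32(u'path_offset').parse(
        cached_entry_data[4:8])
    path_offset_64bit = construct.ULInt64(u'path_offset').parse(
        cached_entry_data[8:16])
    return path_offset_32bit == 0 and path_offset_64bit != 0


# Invented entries: path_size=10, maximum_path_size=12, then the offsets.
entry_64bit = struct.pack('<HHIQ', 10, 12, 0, 0x60)      # 64-bit layout
entry_32bit = struct.pack('<HHIQ', 10, 12, 0x30, 0)      # 32-bit layout

print(looks_64bit(entry_64bit))  # True
print(looks_64bit(entry_32bit))  # False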
Exemplo n.º 25
0
class AppCompatCacheKeyParser(object):
    """Class that parses the Application Compatibility Cache data."""

    FORMAT_TYPE_2000 = 1
    FORMAT_TYPE_XP = 2
    FORMAT_TYPE_2003 = 3
    FORMAT_TYPE_VISTA = 4
    FORMAT_TYPE_7 = 5
    FORMAT_TYPE_8 = 6

    # AppCompatCache format signature used in Windows XP.
    _HEADER_SIGNATURE_XP = 0xdeadbeef

    # AppCompatCache format used in Windows XP.
    _HEADER_XP_32BIT_STRUCT = construct.Struct(
        'appcompatcache_header_xp', construct.ULInt32('signature'),
        construct.ULInt32('number_of_cached_entries'),
        construct.ULInt32('unknown1'), construct.ULInt32('unknown2'),
        construct.Padding(384))

    _CACHED_ENTRY_XP_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_xp_32bit',
        construct.Array(528, construct.Byte('path')),
        construct.ULInt64('last_modification_time'),
        construct.ULInt64('file_size'), construct.ULInt64('last_update_time'))

    # AppCompatCache format signature used in Windows 2003, Vista and 2008.
    _HEADER_SIGNATURE_2003 = 0xbadc0ffe

    # AppCompatCache format used in Windows 2003.
    _HEADER_2003_STRUCT = construct.Struct(
        'appcompatcache_header_2003', construct.ULInt32('signature'),
        construct.ULInt32('number_of_cached_entries'))

    _CACHED_ENTRY_2003_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_2003_32bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt64('file_size'))

    _CACHED_ENTRY_2003_64BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_2003_64bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('unknown1'), construct.ULInt64('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt64('file_size'))

    # AppCompatCache format used in Windows Vista and 2008.
    _CACHED_ENTRY_VISTA_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_vista_32bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'))

    _CACHED_ENTRY_VISTA_64BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_vista_64bit',
        construct.ULInt16('path_size'), construct.ULInt16('maximum_path_size'),
        construct.ULInt32('unknown1'), construct.ULInt64('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'))

    # AppCompatCache format signature used in Windows 7 and 2008 R2.
    _HEADER_SIGNATURE_7 = 0xbadc0fee

    # AppCompatCache format used in Windows 7 and 2008 R2.
    _HEADER_7_STRUCT = construct.Struct(
        'appcompatcache_header_7', construct.ULInt32('signature'),
        construct.ULInt32('number_of_cached_entries'), construct.Padding(120))

    _CACHED_ENTRY_7_32BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_7_32bit', construct.ULInt16('path_size'),
        construct.ULInt16('maximum_path_size'),
        construct.ULInt32('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'),
        construct.ULInt32('data_size'), construct.ULInt32('data_offset'))

    _CACHED_ENTRY_7_64BIT_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_7_64bit', construct.ULInt16('path_size'),
        construct.ULInt16('maximum_path_size'), construct.ULInt32('unknown1'),
        construct.ULInt64('path_offset'),
        construct.ULInt64('last_modification_time'),
        construct.ULInt32('insertion_flags'), construct.ULInt32('shim_flags'),
        construct.ULInt64('data_size'), construct.ULInt64('data_offset'))

    # AppCompatCache format used in Windows 8.0 and 8.1.
    _HEADER_SIGNATURE_8 = 0x00000080

    _HEADER_8_STRUCT = construct.Struct('appcompatcache_header_8',
                                        construct.ULInt32('signature'),
                                        construct.Padding(124))

    _CACHED_ENTRY_HEADER_8_STRUCT = construct.Struct(
        'appcompatcache_cached_entry_header_8', construct.ULInt32('signature'),
        construct.ULInt32('unknown1'),
        construct.ULInt32('cached_entry_data_size'),
        construct.ULInt16('path_size'))

    # AppCompatCache format used in Windows 8.0.
    _CACHED_ENTRY_SIGNATURE_8_0 = '00ts'

    # AppCompatCache format used in Windows 8.1.
    _CACHED_ENTRY_SIGNATURE_8_1 = '10ts'

    def CheckSignature(self, value_data):
        """Parses the signature.

    Args:
      value_data: a binary string containing the value data.

    Returns:
      The format type if successful or None otherwise.
    """
        signature = construct.ULInt32('signature').parse(value_data)
        if signature == self._HEADER_SIGNATURE_XP:
            return self.FORMAT_TYPE_XP

        elif signature == self._HEADER_SIGNATURE_2003:
            # TODO: determine which format version is used (2003 or Vista).
            return self.FORMAT_TYPE_2003

        elif signature == self._HEADER_SIGNATURE_7:
            return self.FORMAT_TYPE_7

        elif signature == self._HEADER_SIGNATURE_8:
            if value_data[signature:signature + 4] in [
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1
            ]:
                return self.FORMAT_TYPE_8

    def ParseHeader(self, format_type, value_data):
        """Parses the header.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.

    Returns:
      A header object (instance of AppCompatCacheHeader).

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        # TODO: change to collections.namedtuple or use __slots__ if the overhead
        # of a regular object becomes a problem.
        header_object = AppCompatCacheHeader()

        if format_type == self.FORMAT_TYPE_XP:
            header_object.header_size = self._HEADER_XP_32BIT_STRUCT.sizeof()
            header_struct = self._HEADER_XP_32BIT_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_2003:
            header_object.header_size = self._HEADER_2003_STRUCT.sizeof()
            header_struct = self._HEADER_2003_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            header_object.header_size = self._HEADER_VISTA_STRUCT.sizeof()
            header_struct = self._HEADER_VISTA_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_7:
            header_object.header_size = self._HEADER_7_STRUCT.sizeof()
            header_struct = self._HEADER_7_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_8:
            header_object.header_size = self._HEADER_8_STRUCT.sizeof()
            header_struct = self._HEADER_8_STRUCT.parse(value_data)

        if format_type in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7
        ]:
            header_object.number_of_cached_entries = header_struct.get(
                'number_of_cached_entries')

        return header_object

    def DetermineCacheEntrySize(self, format_type, value_data,
                                cached_entry_offset):
        """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the first cached entry data relative to the start of
                           the value data.

    Returns:
      The cached entry size if successful or None otherwise.

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[cached_entry_offset:]
        cached_entry_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_size = self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof()

        elif format_type in [
                self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                self.FORMAT_TYPE_7
        ]:
            path_size = construct.ULInt16('path_size').parse(
                cached_entry_data[0:2])
            maximum_path_size = construct.ULInt16('maximum_path_size').parse(
                cached_entry_data[2:4])
            path_offset_32bit = construct.ULInt32('path_offset').parse(
                cached_entry_data[4:8])
            path_offset_64bit = construct.ULInt64('path_offset').parse(
                cached_entry_data[8:16])

            if maximum_path_size < path_size:
                logging.error(u'Path size value out of bounds.')
                return

            path_end_of_string_size = maximum_path_size - path_size
            if path_size == 0 or path_end_of_string_size != 2:
                logging.error(u'Unsupported path size values.')
                return

            # Assume the entry is 64-bit if the 32-bit path offset is 0 and
            # the 64-bit path offset is set.
            if path_offset_32bit == 0 and path_offset_64bit != 0:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
                    )

            else:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof(
                    )

        elif format_type == self.FORMAT_TYPE_8:
            cached_entry_size = self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof()

        return cached_entry_size

    def ParseCachedEntry(self, format_type, value_data, cached_entry_offset,
                         cached_entry_size):
        """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the cached entry data relative to the start of
                           the value data.
      cached_entry_size: integer value that contains the cached entry data size.

    Returns:
      A cached entry object (instance of AppCompatCacheCachedEntry).

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7, self.FORMAT_TYPE_8
        ]:
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[
            cached_entry_offset:cached_entry_offset + cached_entry_size]

        cached_entry_struct = None

        if format_type == self.FORMAT_TYPE_XP:
            if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_2003:
            if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_7:
            if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
                cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_8:
            if cached_entry_data[0:4] not in [
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1
            ]:
                raise RuntimeError(u'Unsupported cache entry signature')

            if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
                    cached_entry_data)

                cached_entry_data_size = cached_entry_struct.get(
                    'cached_entry_data_size')
                cached_entry_size = 12 + cached_entry_data_size

                cached_entry_data = value_data[
                    cached_entry_offset:cached_entry_offset +
                    cached_entry_size]

        if not cached_entry_struct:
            raise RuntimeError(u'Unsupported cache entry size: {0:d}'.format(
                cached_entry_size))

        cached_entry_object = AppCompatCacheCachedEntry()
        cached_entry_object.cached_entry_size = cached_entry_size

        path_offset = 0
        data_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            string_size = 0
            for string_index in xrange(0, 528, 2):
                if (ord(cached_entry_data[string_index]) == 0
                        and ord(cached_entry_data[string_index + 1]) == 0):
                    break
                string_size += 2

            cached_entry_object.path = binary.Ut16StreamCopyToString(
                cached_entry_data[0:string_size])

        elif format_type in [
                self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                self.FORMAT_TYPE_7
        ]:
            path_size = cached_entry_struct.get('path_size')
            path_offset = cached_entry_struct.get('path_offset')

        elif format_type == self.FORMAT_TYPE_8:
            path_size = cached_entry_struct.get('path_size')

            cached_entry_data_offset = 14 + path_size
            cached_entry_object.path = binary.Ut16StreamCopyToString(
                cached_entry_data[14:cached_entry_data_offset])

            remaining_data = cached_entry_data[cached_entry_data_offset:]

            cached_entry_object.insertion_flags = construct.ULInt32(
                'insertion_flags').parse(remaining_data[0:4])
            cached_entry_object.shim_flags = construct.ULInt32(
                'shim_flags').parse(remaining_data[4:8])

            if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
                cached_entry_data_offset += 8

            elif cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
                cached_entry_data_offset += 10

            remaining_data = cached_entry_data[cached_entry_data_offset:]

        if format_type in [
                self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7
        ]:
            cached_entry_object.last_modification_time = cached_entry_struct.get(
                'last_modification_time')

        elif format_type == self.FORMAT_TYPE_8:
            cached_entry_object.last_modification_time = construct.ULInt64(
                'last_modification_time').parse(remaining_data[0:8])

        if format_type in [self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003]:
            cached_entry_object.file_size = cached_entry_struct.get(
                'file_size')

        elif format_type in [self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
            cached_entry_object.insertion_flags = cached_entry_struct.get(
                'insertion_flags')
            cached_entry_object.shim_flags = cached_entry_struct.get(
                'shim_flags')

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_object.last_update_time = cached_entry_struct.get(
                'last_update_time')

        if format_type == self.FORMAT_TYPE_7:
            data_offset = cached_entry_struct.get('data_offset')
            data_size = cached_entry_struct.get('data_size')

        elif format_type == self.FORMAT_TYPE_8:
            data_offset = cached_entry_offset + cached_entry_data_offset + 12
            data_size = construct.ULInt32('data_size').parse(
                remaining_data[8:12])

        if path_offset > 0 and path_size > 0:
            path_size += path_offset

            cached_entry_object.path = binary.Ut16StreamCopyToString(
                value_data[path_offset:path_size])

        if data_size > 0:
            data_size += data_offset

            cached_entry_object.data = value_data[data_offset:data_size]

        return cached_entry_object
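A hedged usage sketch of how the methods above chain together for the fixed-size formats (Windows XP through Windows 7). Here value_data is assumed to be the raw AppCompatCache Registry value read elsewhere, and AppCompatCacheHeader and AppCompatCacheCachedEntry come from the surrounding module:

parser = AppCompatCacheKeyParser()

format_type = parser.CheckSignature(value_data)
header = parser.ParseHeader(format_type, value_data)

cached_entry_offset = header.header_size
cached_entry_size = parser.DetermineCacheEntrySize(
    format_type, value_data, cached_entry_offset)

for _ in range(header.number_of_cached_entries):
    cached_entry = parser.ParseCachedEntry(
        format_type, value_data, cached_entry_offset, cached_entry_size)
    print(cached_entry.path)
    cached_entry_offset += cached_entry.cached_entry_size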
Exemplo n.º 26
0
class UtmpxParser(interface.FileObjectParser):
    """Parser for UTMPX files."""

    NAME = u'utmpx'
    DESCRIPTION = u'Parser for UTMPX files.'

    # INFO: the type is supposed to be a short (2 bytes), however if we
    # analyze the file it is always a byte followed by 3 bytes with the
    # \x00 value.
    _UTMPX_ENTRY = construct.Struct(u'utmpx_mac',
                                    construct.String(u'user', 256),
                                    construct.ULInt32(u'id'),
                                    construct.String(u'tty_name', 32),
                                    construct.ULInt32(u'pid'),
                                    construct.ULInt16(u'status_type'),
                                    construct.ULInt16(u'unknown'),
                                    construct.ULInt32(u'timestamp'),
                                    construct.ULInt32(u'microsecond'),
                                    construct.String(u'hostname', 256),
                                    construct.Padding(64))

    _UTMPX_ENTRY_SIZE = _UTMPX_ENTRY.sizeof()

    _STATUS_TYPE_SIGNATURE = 10

    def _ReadEntry(self, file_object):
        """Reads an UTMPX entry.

    Args:
      file_object: a file-like object that points to an UTMPX file.

    Returns:
      An event object (instance of UtmpxMacOsXEvent) or None if
      the UTMPX entry cannot be read.
    """
        data = file_object.read(self._UTMPX_ENTRY_SIZE)
        if len(data) != self._UTMPX_ENTRY_SIZE:
            return

        try:
            entry_struct = self._UTMPX_ENTRY.parse(data)
        except (IOError, construct.FieldError) as exception:
            logging.warning(
                u'Unable to parse Mac OS X UTMPX entry with error: {0:s}'.
                format(exception))
            return

        user, _, _ = entry_struct.user.partition(b'\x00')
        if not user:
            user = u'N/A'

        terminal, _, _ = entry_struct.tty_name.partition(b'\x00')
        if not terminal:
            terminal = u'N/A'

        computer_name, _, _ = entry_struct.hostname.partition(b'\x00')
        if not computer_name:
            computer_name = u'localhost'

        return UtmpxMacOsXEvent(entry_struct.timestamp,
                                user,
                                terminal,
                                entry_struct.status_type,
                                computer_name,
                                micro_seconds=entry_struct.microsecond)

    def _VerifyStructure(self, file_object):
        """Verify that we are dealing with an UTMPX entry.

    Args:
      file_object: a file-like object that points to an UTMPX file.

    Returns:
      True if it is a UTMPX entry or False otherwise.
    """
        # First entry is a SIGNAL entry of the file ("header").
        try:
            header_struct = self._UTMPX_ENTRY.parse_stream(file_object)
        except (IOError, construct.FieldError):
            return False
        user, _, _ = header_struct.user.partition(b'\x00')

        # The UTMPX_ENTRY structure will often parse successfully against
        # other file formats, such as binary plist files, and thus we need
        # to do some additional validation. The first check is whether the
        # user name can be converted into a Unicode string, otherwise we can
        # assume we are dealing with non UTMPX data.
        try:
            user.decode(u'utf-8')
        except UnicodeDecodeError:
            return False

        if user != b'utmpx-1.00':
            return False
        if header_struct.status_type != self._STATUS_TYPE_SIGNATURE:
            return False
        if (header_struct.timestamp != 0 or header_struct.microsecond != 0
                or header_struct.pid != 0):
            return False
        tty_name, _, _ = header_struct.tty_name.partition(b'\x00')
        hostname, _, _ = header_struct.hostname.partition(b'\x00')
        if tty_name or hostname:
            return False

        return True

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses an UTMPX file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: The file-like object to extract data from.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        if not self._VerifyStructure(file_object):
            raise errors.UnableToParseFile(u'The file is not an UTMPX file.')

        event_object = self._ReadEntry(file_object)
        while event_object:
            event_object.offset = file_object.tell()
            parser_mediator.ProduceEvent(event_object)

            event_object = self._ReadEntry(file_object)
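The "header" record that _VerifyStructure expects can be reproduced with synthetic data: the user field holds the magic 'utmpx-1.00' string, status_type is 10, and the remaining fields are zero. A sketch assuming the construct 2.5 API; the entry layout repeats _UTMPX_ENTRY above:

import construct

_UTMPX_ENTRY = construct.Struct(
    u'utmpx_mac',
    construct.String(u'user', 256),
    construct.ULInt32(u'id'),
    construct.String(u'tty_name', 32),
    construct.ULInt32(u'pid'),
    construct.ULInt16(u'status_type'),
    construct.ULInt16(u'unknown'),
    construct.ULInt32(u'timestamp'),
    construct.ULInt32(u'microsecond'),
    construct.String(u'hostname', 256),
    construct.Padding(64))

# Synthetic first record: magic user string, signature status type 10 and
# all other fields zero-filled.
data = b'utmpx-1.00'.ljust(256, b'\x00')               # user
data += b'\x00' * 4                                    # id
data += b'\x00' * 32                                   # tty_name
data += b'\x00' * 4                                    # pid
data += construct.ULInt16(u'status_type').build(10)    # status_type
data += b'\x00' * 2                                    # unknown
data += b'\x00' * 8                                    # timestamp + microsecond
data += b'\x00' * 256                                  # hostname
data += b'\x00' * 64                                   # padding

header = _UTMPX_ENTRY.parse(data)
user, _, _ = header.user.partition(b'\x00')
print(user == b'utmpx-1.00' and header.status_type == 10)  # True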
Exemplo n.º 27
0
class BaseMRUListPlugin(interface.WindowsRegistryPlugin):
    """Class for common MRUList Windows Registry plugin functionality."""

    _MRULIST_STRUCT = construct.Range(1, 500,
                                      construct.ULInt16(u'entry_letter'))

    _SOURCE_APPEND = u': MRU List'

    @abc.abstractmethod
    def _ParseMRUListEntryValue(self, parser_mediator, registry_key,
                                entry_index, entry_letter, **kwargs):
        """Parses the MRUList entry value.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key that contains
           the MRUList value.
      entry_index (int): MRUList entry index.
      entry_letter (str): character value representing the entry.

    Returns:
      str: MRUList entry value.
    """

    def _ParseMRUListValue(self, registry_key):
        """Parses the MRUList value in a given Registry key.

    Args:
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key that contains
           the MRUList value.

    Returns:
      generator: MRUList value generator, which returns the MRU index number
          and entry value.
    """
        mru_list_value = registry_key.GetValueByName(u'MRUList')

        # The key exists but does not contain a value named "MRUList".
        if not mru_list_value:
            return enumerate([])

        try:
            mru_list = self._MRULIST_STRUCT.parse(mru_list_value.data)
        except construct.FieldError:
            logging.warning(
                u'[{0:s}] Unable to parse the MRU key: {1:s}'.format(
                    self.NAME, registry_key.path))
            return enumerate([])

        return enumerate(mru_list)

    def _ParseMRUListKey(self,
                         parser_mediator,
                         registry_key,
                         codepage=u'cp1252'):
        """Extract event objects from a MRUList Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
      codepage (Optional[str]): extended ASCII string codepage.
    """
        values_dict = {}
        for entry_index, entry_letter in self._ParseMRUListValue(registry_key):
            # TODO: detect if list ends prematurely.
            # MRU lists are terminated with \0 (0x0000).
            if entry_letter == 0:
                break

            entry_letter = chr(entry_letter)

            value_string = self._ParseMRUListEntryValue(parser_mediator,
                                                        registry_key,
                                                        entry_index,
                                                        entry_letter,
                                                        codepage=codepage)

            value_text = u'Index: {0:d} [MRU Value {1:s}]'.format(
                entry_index + 1, entry_letter)

            values_dict[value_text] = value_string

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = values_dict
        event_data.source_append = self._SOURCE_APPEND

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
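The MRUList value itself is just a sequence of 16-bit letters followed by a 0x0000 terminator, which is what the Range of ULInt16 above captures. A small sketch with invented Registry value data, assuming the construct 2.5 API:

import construct

_MRULIST_STRUCT = construct.Range(1, 500, construct.ULInt16(u'entry_letter'))

# Invented MRUList data: the letters 'c', 'a', 'b' and the 0x0000 terminator.
mru_list_data = u'cab'.encode(u'utf-16-le') + b'\x00\x00'

for entry_index, entry_letter in enumerate(
        _MRULIST_STRUCT.parse(mru_list_data)):
    if entry_letter == 0:      # list terminator
        break
    print(u'Index: {0:d} [MRU Value {1:s}]'.format(
        entry_index + 1, chr(entry_letter)))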
Exemplo n.º 28
0
class WinJobParser(interface.SingleFileBaseParser):
  """Parse Windows Scheduled Task files for job events."""

  NAME = u'winjob'
  DESCRIPTION = u'Parser for Windows Scheduled Task job (or At-job) files.'

  PRODUCT_VERSIONS = {
      0x0400: u'Windows NT 4.0',
      0x0500: u'Windows 2000',
      0x0501: u'Windows XP',
      0x0600: u'Windows Vista',
      0x0601: u'Windows 7',
      0x0602: u'Windows 8',
      0x0603: u'Windows 8.1'
  }

  TRIGGER_TYPES = {
      0x0000: u'ONCE',
      0x0001: u'DAILY',
      0x0002: u'WEEKLY',
      0x0003: u'MONTHLYDATE',
      0x0004: u'MONTHLYDOW',
      0x0005: u'EVENT_ON_IDLE',
      0x0006: u'EVENT_AT_SYSTEMSTART',
      0x0007: u'EVENT_AT_LOGON'
  }

  JOB_FIXED_STRUCT = construct.Struct(
      u'job_fixed',
      construct.ULInt16(u'product_version'),
      construct.ULInt16(u'file_version'),
      construct.Bytes(u'job_uuid', 16),
      construct.ULInt16(u'app_name_len_offset'),
      construct.ULInt16(u'trigger_offset'),
      construct.ULInt16(u'error_retry_count'),
      construct.ULInt16(u'error_retry_interval'),
      construct.ULInt16(u'idle_deadline'),
      construct.ULInt16(u'idle_wait'),
      construct.ULInt32(u'priority'),
      construct.ULInt32(u'max_run_time'),
      construct.ULInt32(u'exit_code'),
      construct.ULInt32(u'status'),
      construct.ULInt32(u'flags'),
      construct.ULInt16(u'ran_year'),
      construct.ULInt16(u'ran_month'),
      construct.ULInt16(u'ran_weekday'),
      construct.ULInt16(u'ran_day'),
      construct.ULInt16(u'ran_hour'),
      construct.ULInt16(u'ran_minute'),
      construct.ULInt16(u'ran_second'),
      construct.ULInt16(u'ran_millisecond'),
      )

  # Using Construct's utf-16 encoding here will create strings with their
  # null terminators exposed. Instead, we'll read these variables raw and
  # convert them using Plaso's ReadUtf16() for proper formatting.
  JOB_VARIABLE_STRUCT = construct.Struct(
      u'job_variable',
      construct.ULInt16(u'running_instance_count'),
      construct.ULInt16(u'app_name_len'),
      construct.String(
          u'app_name',
          lambda ctx: ctx.app_name_len * 2),
      construct.ULInt16(u'parameter_len'),
      construct.String(
          u'parameter',
          lambda ctx: ctx.parameter_len * 2),
      construct.ULInt16(u'working_dir_len'),
      construct.String(
          u'working_dir',
          lambda ctx: ctx.working_dir_len * 2),
      construct.ULInt16(u'username_len'),
      construct.String(
          u'username',
          lambda ctx: ctx.username_len * 2),
      construct.ULInt16(u'comment_len'),
      construct.String(
          u'comment',
          lambda ctx: ctx.comment_len * 2),
      construct.ULInt16(u'userdata_len'),
      construct.String(
          u'userdata',
          lambda ctx: ctx.userdata_len),
      construct.ULInt16(u'reserved_len'),
      construct.String(
          u'reserved',
          lambda ctx: ctx.reserved_len),
      construct.ULInt16(u'test'),
      construct.ULInt16(u'trigger_size'),
      construct.ULInt16(u'trigger_reserved1'),
      construct.ULInt16(u'sched_start_year'),
      construct.ULInt16(u'sched_start_month'),
      construct.ULInt16(u'sched_start_day'),
      construct.ULInt16(u'sched_end_year'),
      construct.ULInt16(u'sched_end_month'),
      construct.ULInt16(u'sched_end_day'),
      construct.ULInt16(u'sched_start_hour'),
      construct.ULInt16(u'sched_start_minute'),
      construct.ULInt32(u'sched_duration'),
      construct.ULInt32(u'sched_interval'),
      construct.ULInt32(u'trigger_flags'),
      construct.ULInt32(u'trigger_type'),
      construct.ULInt16(u'trigger_arg0'),
      construct.ULInt16(u'trigger_arg1'),
      construct.ULInt16(u'trigger_arg2'),
      construct.ULInt16(u'trigger_padding'),
      construct.ULInt16(u'trigger_reserved2'),
      construct.ULInt16(u'trigger_reserved3'))

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a Windows job file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    try:
      header = self.JOB_FIXED_STRUCT.parse_stream(file_object)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          u'Unable to parse Windows Task Job file with error: {0:s}'.format(
              exception))

    if header.product_version not in self.PRODUCT_VERSIONS:
      raise errors.UnableToParseFile(u'Not a valid Scheduled Task file')

    if header.file_version != 1:
      raise errors.UnableToParseFile(u'Not a valid Scheduled Task file')

    # Obtain the relevant values from the file.
    try:
      data = self.JOB_VARIABLE_STRUCT.parse_stream(file_object)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          u'Unable to parse Windows Task Job file with error: {0:s}'.format(
              exception))

    trigger_type = self.TRIGGER_TYPES.get(data.trigger_type, u'Unknown')

    last_run_date = timelib.Timestamp.FromTimeParts(
        header.ran_year,
        header.ran_month,
        header.ran_day,
        header.ran_hour,
        header.ran_minute,
        header.ran_second,
        microseconds=(header.ran_millisecond * 1000),
        timezone=parser_mediator.timezone)

    scheduled_date = timelib.Timestamp.FromTimeParts(
        data.sched_start_year,
        data.sched_start_month,
        data.sched_start_day,
        data.sched_start_hour,
        data.sched_start_minute,
        0,  # Seconds are not stored.
        timezone=parser_mediator.timezone)

    # Create two timeline events: one for the last run time and one for the
    # scheduled start time.
    parser_mediator.ProduceEvents(
        [WinJobEvent(
            last_run_date, eventdata.EventTimestamp.LAST_RUNTIME, data.app_name,
            data.parameter, data.working_dir, data.username, trigger_type,
            data.comment),
         WinJobEvent(
             scheduled_date, u'Scheduled To Start', data.app_name,
             data.parameter, data.working_dir, data.username, trigger_type,
             data.comment)])

    # A scheduled end date is optional.
    if data.sched_end_year:
      scheduled_end_date = timelib.Timestamp.FromTimeParts(
          data.sched_end_year,
          data.sched_end_month,
          data.sched_end_day,
          0,  # Hours are not stored.
          0,  # Minutes are not stored.
          0,  # Seconds are not stored.
          timezone=parser_mediator.timezone)

      event_object = WinJobEvent(
          scheduled_end_date, u'Scheduled To End', data.app_name,
          data.parameter, data.working_dir, data.username, trigger_type,
          data.comment)
      parser_mediator.ProduceEvent(event_object)
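
# A minimal sketch (not part of the parser above) showing how the fixed size
# .job header defined in JOB_FIXED_STRUCT could be parsed on its own with the
# legacy construct 2.5 API used in this example. The header bytes below are
# hypothetical: product version 0x0501 (Windows XP), file version 1 and the
# remaining fields zeroed.
import struct

fake_header = struct.pack('<HH', 0x0501, 0x0001) + b'\x00' * 64
parsed = WinJobParser.JOB_FIXED_STRUCT.parse(fake_header)
print(WinJobParser.PRODUCT_VERSIONS.get(parsed.product_version))  # Windows XP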
Exemplo n.º 29
0
class Timestamp(object):
    """Class for converting timestamps to plaso timestamps.

    The Plaso timestamp is a 64-bit signed timestamp value containing:
    micro seconds since 1970-01-01 00:00:00.

    The timestamp is not necessarily in UTC.
  """
    # The minimum timestamp in seconds
    TIMESTAMP_MIN_SECONDS = -(((1 << 63L) - 1) / 1000000)

    # The maximum timestamp in seconds
    TIMESTAMP_MAX_SECONDS = ((1 << 63L) - 1) / 1000000

    # The minimum timestamp in micro seconds
    TIMESTAMP_MIN_MICRO_SECONDS = -((1 << 63L) - 1)

    # The maximum timestamp in micro seconds
    TIMESTAMP_MAX_MICRO_SECONDS = (1 << 63L) - 1

    # Timestamp value used to represent the absence of a date and time value.
    # TODO: replace this with a real None implementation.
    NONE_TIMESTAMP = 0

    # The days per month of a non leap year
    DAYS_PER_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

    # The number of seconds in a day
    SECONDS_PER_DAY = 24 * 60 * 60

    # The number of micro seconds per second
    MICRO_SECONDS_PER_SECOND = 1000000

    # The multiplication factor to change milliseconds to micro seconds.
    MILLI_SECONDS_TO_MICRO_SECONDS = 1000

    # The difference between Jan 1, 1980 and Jan 1, 1970 in seconds.
    FAT_DATE_TO_POSIX_BASE = 315532800

    # The difference between Jan 1, 1601 and Jan 1, 1970 in micro seconds
    WEBKIT_TIME_TO_POSIX_BASE = 11644473600L * 1000000

    # The difference between Jan 1, 1601 and Jan 1, 1970 in 100 nanoseconds.
    FILETIME_TO_POSIX_BASE = 11644473600L * 10000000

    # The difference between Nov 10, 1582 and Jan 1, 1970 in 100 nanoseconds.
    UUID_TIME_TO_POSIX_BASE = 12219292800L * 10000000

    # The number of seconds between January 1, 1904 and Jan 1, 1970.
    # Value confirmed with sleuthkit:
    #  http://svn.sleuthkit.org/repos/sleuthkit/trunk/tsk3/fs/tsk_hfs.h
    # and linux source file linux/include/linux/hfsplus_fs.h
    HFSTIME_TO_POSIX_BASE = 2082844800

    # The number of seconds between January 1, 1970 and January 1, 2001.
    # As specified in:
    # https://developer.apple.com/library/ios/documentation/
    #       cocoa/Conceptual/DatesAndTimes/Articles/dtDates.html
    COCOA_TIME_TO_POSIX_BASE = 978307200

    # The difference between POSIX (Jan 1, 1970) and DELPHI (Dec 30, 1899).
    # http://docwiki.embarcadero.com/Libraries/XE3/en/System.TDateTime
    DELPHI_TIME_TO_POSIX_BASE = 25569

    # The Windows SYSTEMTIME structure.
    SYSTEMTIME_STRUCT = construct.Struct(u'timestamp',
                                         construct.ULInt16(u'year'),
                                         construct.ULInt16(u'month'),
                                         construct.ULInt16(u'weekday'),
                                         construct.ULInt16(u'day'),
                                         construct.ULInt16(u'hour'),
                                         construct.ULInt16(u'minutes'),
                                         construct.ULInt16(u'seconds'),
                                         construct.ULInt16(u'milliseconds'))

    @classmethod
    def CopyFromString(cls, time_string):
        """Copies a timestamp from a string containing a date and time value.

    Args:
      time_string: A string containing a date and time value formatted as:
                   YYYY-MM-DD hh:mm:ss.######[+-]##:##
                   Where # are numeric digits ranging from 0 to 9 and the
                   seconds fraction can be either 3 or 6 digits. The time
                   of day, seconds fraction and timezone offset are optional.
                   The default timezone is UTC.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC.

    Raises:
      ValueError: if the time string is invalid or not supported.
    """
        if not time_string:
            raise ValueError(u'Invalid time string.')

        time_string_length = len(time_string)

        # The time string should at least contain 'YYYY-MM-DD'.
        if (time_string_length < 10 or time_string[4] != u'-'
                or time_string[7] != u'-'):
            raise ValueError(u'Invalid time string.')

        # If a time of day is specified, the time string should at least
        # contain 'YYYY-MM-DD hh:mm:ss'.
        if (time_string_length > 10
                and (time_string_length < 19 or time_string[10] != u' '
                     or time_string[13] != u':' or time_string[16] != u':')):
            raise ValueError(u'Invalid time string.')

        try:
            year = int(time_string[0:4], 10)
        except ValueError:
            raise ValueError(u'Unable to parse year.')

        try:
            month = int(time_string[5:7], 10)
        except ValueError:
            raise ValueError(u'Unable to parse month.')

        if month not in range(1, 13):
            raise ValueError(u'Month value out of bounds.')

        try:
            day_of_month = int(time_string[8:10], 10)
        except ValueError:
            raise ValueError(u'Unable to parse day of month.')

        if day_of_month not in range(1, 32):
            raise ValueError(u'Day of month value out of bounds.')

        hours = 0
        minutes = 0
        seconds = 0

        if time_string_length > 10:
            try:
                hours = int(time_string[11:13], 10)
            except ValueError:
                raise ValueError(u'Unable to parse hours.')

            if hours not in range(0, 24):
                raise ValueError(u'Hours value out of bounds.')

            try:
                minutes = int(time_string[14:16], 10)
            except ValueError:
                raise ValueError(u'Unable to parse minutes.')

            if minutes not in range(0, 60):
                raise ValueError(u'Minutes value out of bounds.')

            try:
                seconds = int(time_string[17:19], 10)
            except ValueError:
                raise ValueError(u'Unable to parse seconds.')

            if seconds not in range(0, 60):
                raise ValueError(u'Seconds value out of bounds.')

        micro_seconds = 0
        timezone_offset = 0

        if time_string_length > 19:
            if time_string[19] != u'.':
                timezone_index = 19
            else:
                for timezone_index in range(19, time_string_length):
                    if time_string[timezone_index] in [u'+', u'-']:
                        break

                    # The calculations that follow rely on the timezone index
                    # pointing beyond the string in case no timezone offset
                    # was defined.
                    if timezone_index == time_string_length - 1:
                        timezone_index += 1

            if timezone_index > 19:
                fraction_of_seconds_length = timezone_index - 20
                if fraction_of_seconds_length not in [3, 6]:
                    raise ValueError(u'Invalid time string.')

                try:
                    micro_seconds = int(time_string[20:timezone_index], 10)
                except ValueError:
                    raise ValueError(u'Unable to parse fraction of seconds.')

                if fraction_of_seconds_length == 3:
                    micro_seconds *= 1000

            if timezone_index < time_string_length:
                if (time_string_length - timezone_index != 6
                        or time_string[timezone_index + 3] != u':'):
                    raise ValueError(u'Invalid time string.')

                try:
                    timezone_offset = int(time_string[timezone_index +
                                                      1:timezone_index + 3])
                except ValueError:
                    raise ValueError(u'Unable to parse timezone hours offset.')

                if timezone_offset not in range(0, 24):
                    raise ValueError(
                        u'Timezone hours offset value out of bounds.')

                # Note that when the sign of the timezone offset is negative
                # the difference needs to be added. We do so by flipping the sign.
                if time_string[timezone_index] == u'-':
                    timezone_offset *= 60
                else:
                    timezone_offset *= -60

                try:
                    timezone_offset += int(time_string[timezone_index +
                                                       4:timezone_index + 6])
                except ValueError:
                    raise ValueError(
                        u'Unable to parse timezone minutes offset.')

                timezone_offset *= 60

        timestamp = int(
            calendar.timegm(
                (year, month, day_of_month, hours, minutes, seconds)))

        return ((timestamp + timezone_offset) * 1000000) + micro_seconds
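
    # Illustrative only: with the format described above,
    #   CopyFromString(u'1970-01-01 00:00:01') returns 1000000, and
    #   CopyFromString(u'1970-01-01 01:00:00+01:00') returns 0, since the one
    #   hour timezone offset is subtracted to normalize the value to UTC.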

    @classmethod
    def CopyToDatetime(cls, timestamp, timezone, raise_error=False):
        """Copies the timestamp to a datetime object.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.
      timezone: The timezone (pytz.timezone) object.
      raise_error: Boolean; if set to True an OverflowError is raised when the
                   timestamp is out of bounds. By default the error is absorbed
                   and no error is raised.

    Returns:
      A datetime object (instance of datetime.datetime). A datetime object of
      January 1, 1970 00:00:00 UTC is returned on error if raise_error is
      not set.

    Raises:
      OverflowError: If raise_error is set to True and an overflow error
                     occurs.
    """
        datetime_object = datetime.datetime(1970,
                                            1,
                                            1,
                                            0,
                                            0,
                                            0,
                                            0,
                                            tzinfo=pytz.UTC)
        try:
            datetime_object += datetime.timedelta(microseconds=timestamp)
            return datetime_object.astimezone(timezone)
        except OverflowError as exception:
            if raise_error:
                raise

            logging.error(
                (u'Unable to copy {0:d} to a datetime object with error: '
                 u'{1:s}').format(timestamp, exception))

        return datetime_object

    @classmethod
    def CopyToIsoFormat(cls, timestamp, timezone=pytz.UTC, raise_error=False):
        """Copies the timestamp to an ISO 8601 formatted string.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.
      timezone: Optional timezone (instance of pytz.timezone).
      raise_error: Boolean; if set to True an OverflowError is raised when the
                   timestamp is out of bounds. By default the error is absorbed
                   and no error is raised.

    Returns:
      A string containing an ISO 8601 formatted date and time.
    """
        datetime_object = cls.CopyToDatetime(timestamp,
                                             timezone,
                                             raise_error=raise_error)
        return datetime_object.isoformat()

    @classmethod
    def CopyToPosix(cls, timestamp):
        """Converts microsecond timestamps to POSIX timestamps.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.

    Returns:
      The timestamp which is an integer containing the number of seconds
      since January 1, 1970, 00:00:00 UTC.
    """
        return timestamp // cls.MICRO_SECONDS_PER_SECOND

    @classmethod
    def DaysInMonth(cls, month, year):
        """Determines the days in a month for a specific year.

    Args:
      month: The month where 0 represents January.
      year: The year as in 1970.

    Returns:
      An integer containing the number of days in the month.

    Raises:
      ValueError: if the month value is invalid.
    """
        if month not in range(0, 12):
            raise ValueError(u'Invalid month value')

        days_per_month = cls.DAYS_PER_MONTH[month]

        if month == 1 and cls.IsLeapYear(year):
            days_per_month += 1

        return days_per_month

    @classmethod
    def DaysInYear(cls, year):
        """Determines the days in a year.

    Args:
      year: The year as in 1970.

    Returns:
      An integer containing the number of days in the year.
    """
        days_in_year = 365
        if cls.IsLeapYear(year):
            return days_in_year + 1
        return days_in_year

    @classmethod
    def DayOfYear(cls, day, month, year):
        """Determines the day of the year for a specific day of a month in a year.

    Args:
      day: The day of the month where 0 represents the first day.
      month: The month where 0 represents January.
      year: The year as in 1970.

    Returns:
      An integer containing the day of year.
    """
        day_of_year = day

        for past_month in range(0, month):
            day_of_year += cls.DaysInMonth(past_month, year)

        return day_of_year

    @classmethod
    def FromCocoaTime(cls, cocoa_time):
        """Converts a Cocoa time to a timestamp.

    In Cocoa, time and date values are stored in an unsigned 32-bit integer
    containing the number of seconds since January 1, 2001 at 00:00:00
    (midnight) UTC (GMT).

    Args:
      cocoa_time: The timestamp in Cocoa format.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        return cls.FromPosixTime(cocoa_time + cls.COCOA_TIME_TO_POSIX_BASE)
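
    # Illustrative only: FromCocoaTime(0), i.e. 2001-01-01 00:00:00 UTC,
    # returns 978307200 * 1000000 micro seconds.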

    @classmethod
    def FromDelphiTime(cls, delphi_time):
        """Converts a Delphi time to a timestamp.

    In Delphi, time and date values (TDateTime) are stored in a little endian
    64-bit floating point value containing the number of days since
    December 30, 1899 at 00:00:00 (midnight) local time.
    TDateTime does not carry any time zone information.

    Args:
      delphi_time: The timestamp in Delphi format.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        posix_time = (delphi_time - cls.DELPHI_TIME_TO_POSIX_BASE) * 86400.0
        if (posix_time < cls.TIMESTAMP_MIN_SECONDS
                or posix_time > cls.TIMESTAMP_MAX_SECONDS):
            return 0

        return cls.FromPosixTime(int(posix_time))

    @classmethod
    def FromFatDateTime(cls, fat_date_time):
        """Converts a FAT date and time into a timestamp.

    FAT date time is mainly used in DOS/Windows file formats and FAT.

    The FAT date and time is a 32-bit value containing two 16-bit values:
      * The date (lower 16-bit).
        * bits 0 - 4:  day of month, where 1 represents the first day
        * bits 5 - 8:  month of year, where 1 represent January
        * bits 9 - 15: year since 1980
      * The time of day (upper 16-bit).
        * bits 0 - 4: seconds (in 2 second intervals)
        * bits 5 - 10: minutes
        * bits 11 - 15: hours

    Args:
      fat_date_time: The 32-bit FAT date time.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        number_of_seconds = cls.FAT_DATE_TO_POSIX_BASE

        day_of_month = (fat_date_time & 0x1f) - 1
        month = ((fat_date_time >> 5) & 0x0f) - 1
        year = (fat_date_time >> 9) & 0x7f

        if day_of_month < 0 or day_of_month > 30 or month < 0 or month > 11:
            return 0

        number_of_days = cls.DayOfYear(day_of_month, month, 1980 + year)
        for past_year in range(0, year):
            number_of_days += cls.DaysInYear(past_year)

        fat_date_time >>= 16

        seconds = (fat_date_time & 0x1f) * 2
        minutes = (fat_date_time >> 5) & 0x3f
        hours = (fat_date_time >> 11) & 0x1f

        if hours > 23 or minutes > 59 or seconds > 59:
            return 0

        number_of_seconds += (((hours * 60) + minutes) * 60) + seconds

        number_of_seconds += number_of_days * cls.SECONDS_PER_DAY

        return number_of_seconds * cls.MICRO_SECONDS_PER_SECOND
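
    # Illustrative only: a FAT date time of 0x00000021 encodes
    # 1980-01-01 00:00:00 (day 1, month 1, year offset 0, time of day 0),
    # so FromFatDateTime(0x00000021) returns 315532800 * 1000000.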

    @classmethod
    def FromFiletime(cls, filetime):
        """Converts a FILETIME into a timestamp.

    FILETIME is mainly used in Windows file formats and NTFS.

    The FILETIME is a 64-bit value containing:
      the number of 100 nanosecond intervals since 1601-01-01 00:00:00

    Technically FILETIME consists of 2 x 32-bit parts and is presumed
    to be unsigned.

    Args:
      filetime: The 64-bit FILETIME timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        # TODO: add handling for the case where the timestamp equals zero.
        if filetime < 0:
            return 0
        timestamp = (filetime - cls.FILETIME_TO_POSIX_BASE) / 10

        if timestamp > cls.TIMESTAMP_MAX_MICRO_SECONDS:
            return 0
        return timestamp
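
    # Illustrative only: FromFiletime(cls.FILETIME_TO_POSIX_BASE) returns 0
    # and FromFiletime(cls.FILETIME_TO_POSIX_BASE + 10000000), i.e. one second
    # past 1970-01-01 00:00:00 UTC, returns 1000000.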

    @classmethod
    def FromHfsTime(cls, hfs_time, timezone=pytz.UTC, is_dst=False):
        """Converts a HFS time to a timestamp.

    HFS time is the same as HFS+ time, except stored in the local
    timezone of the user.

    Args:
      hfs_time: Timestamp in the hfs format (32 bit unsigned int).
      timezone: The timezone object of the system's local time.
      is_dst: A boolean to indicate the timestamp is corrected for daylight
              savings time (DST) only used for the DST transition period.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        timestamp_local = cls.FromHfsPlusTime(hfs_time)
        return cls.LocaltimeToUTC(timestamp_local, timezone, is_dst)

    @classmethod
    def FromHfsPlusTime(cls, hfs_time):
        """Converts a HFS+ time to a timestamp.

    In HFS+ date and time values are stored in an unsigned 32-bit integer
    containing the number of seconds since January 1, 1904 at 00:00:00
    (midnight) UTC (GMT).

    Args:
      hfs_time: The timestamp in HFS+ format.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        return cls.FromPosixTime(hfs_time - cls.HFSTIME_TO_POSIX_BASE)

    @classmethod
    def FromJavaTime(cls, java_time):
        """Converts a Java time to a timestamp.

    Java time is the number of milliseconds since
    January 1, 1970, 00:00:00 UTC.

    URL: http://docs.oracle.com/javase/7/docs/api/
         java/sql/Timestamp.html#getTime%28%29

    Args:
      java_time: The Java Timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        return java_time * cls.MILLI_SECONDS_TO_MICRO_SECONDS

    @classmethod
    def FromPosixTime(cls, posix_time):
        """Converts a POSIX timestamp into a timestamp.

    The POSIX time is a signed 32-bit or 64-bit value containing:
      seconds since 1970-01-01 00:00:00

    Args:
      posix_time: The POSIX timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        if (posix_time < cls.TIMESTAMP_MIN_SECONDS
                or posix_time > cls.TIMESTAMP_MAX_SECONDS):
            return 0
        return int(posix_time) * cls.MICRO_SECONDS_PER_SECOND

    @classmethod
    def FromPosixTimeWithMicrosecond(cls, posix_time, microsecond):
        """Converts a POSIX timestamp with microsecond into a timestamp.

    The POSIX time is a signed 32-bit or 64-bit value containing:
      seconds since 1970-01-01 00:00:00

    Args:
      posix_time: The POSIX timestamp.
      microsecond: The microseconds to add to the timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        timestamp = cls.FromPosixTime(posix_time)
        if not timestamp:
            return 0
        return timestamp + microsecond

    @classmethod
    def FromPythonDatetime(cls, datetime_object):
        """Converts a Python datetime object into a timestamp.

    Args:
      datetime_object: The datetime object (instance of datetime.datetime).

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        if not isinstance(datetime_object, datetime.datetime):
            return 0

        posix_time = int(calendar.timegm(datetime_object.utctimetuple()))
        return cls.FromPosixTime(posix_time) + datetime_object.microsecond
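
    # Illustrative only: naive datetime objects are treated as UTC, so
    # FromPythonDatetime(datetime.datetime(1970, 1, 1, 0, 0, 1, 250000))
    # returns 1250000.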

    @classmethod
    def FromRFC2579Datetime(cls, year, month, day, hour, minutes, seconds,
                            deciseconds, direction_from_utc, hours_from_utc,
                            minutes_from_utc):
        """Converts values from an RFC2579 time to a timestamp.

    See https://tools.ietf.org/html/rfc2579.

    Args:
      year: An integer representing the year.
      month: An integer between 1 and 12.
      day: An integer representing the number of day in the month.
      hour: An integer representing the hour, 0 <= hour < 24.
      minutes: An integer, 0 <= minute < 60.
      seconds: An integer, 0 <= second < 60.
      deciseconds: An integer, 0 <= deciseconds < 10.
      direction_from_utc: An ASCII character, either '+' or '-'.
      hours_from_utc: An integer representing the number of hours the time is
                      offset from UTC.
      minutes_from_utc: An integer representing the number of minutes the time
                        is offset from UTC.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.

    Raises:
      TimestampError: if the timestamp cannot be created from the time parts.
    """
        microseconds = deciseconds * 100000
        utc_offset_minutes = (hours_from_utc * 60) + minutes_from_utc
        if direction_from_utc == u'-':
            utc_offset_minutes = -utc_offset_minutes
        timezone = pytz.FixedOffset(utc_offset_minutes)
        return cls.FromTimeParts(year, month, day, hour, minutes, seconds,
                                 microseconds, timezone)

    @classmethod
    def FromSystemtime(cls, systemtime):
        """Converts a SYSTEMTIME structure into a timestamp.

    The SYSTEMTIME structure is a 128-bit struct containing 8 little endian
    16-bit integers structured like so:
      struct {
        WORD year,
        WORD month,
        WORD day_of_week,
        WORD day,
        WORD hour,
        WORD minute,
        WORD second,
        WORD millisecond
      }

    Args:
      systemtime (bytes): 128-bit SYSTEMTIME timestamp value.

    Returns:
      int: timestamp, which contains the number of micro seconds since
          January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        try:
            timestamp = cls.SYSTEMTIME_STRUCT.parse(systemtime)
        except construct.ConstructError as exception:
            raise errors.TimestampError(
                u'Unable to create timestamp from {0:s} with error: {1:s}'.
                format(systemtime, exception))
        return cls.FromTimeParts(
            year=timestamp.year,
            month=timestamp.month,
            day=timestamp.day,
            hour=timestamp.hour,
            minutes=timestamp.minutes,
            seconds=timestamp.seconds,
            microseconds=(timestamp.milliseconds *
                          cls.MILLI_SECONDS_TO_MICRO_SECONDS))
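
    # Illustrative only (hypothetical value): a 16 byte little endian blob
    # such as struct.pack('<8H', 2012, 3, 0, 5, 6, 7, 8, 500), i.e. year,
    # month, weekday, day, hour, minute, second, millisecond, converts to the
    # timestamp of 2012-03-05 06:07:08.500000 UTC.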

    @classmethod
    def FromTimeParts(cls,
                      year,
                      month,
                      day,
                      hour,
                      minutes,
                      seconds,
                      microseconds=0,
                      timezone=pytz.UTC):
        """Converts a list of time entries to a timestamp.

    Args:
      year: An integer representing the year.
      month: An integer between 1 and 12.
      day: An integer representing the number of day in the month.
      hour: An integer representing the hour, 0 <= hour < 24.
      minutes: An integer, 0 <= minute < 60.
      seconds: An integer, 0 <= second < 60.
      microseconds: Optional number of microseconds ranging from:
                    0 <= microsecond < 1000000.
      timezone: Optional timezone (instance of pytz.timezone).

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.

    Raises:
      TimestampError: if the timestamp cannot be created from the time parts.
    """
        try:
            date = datetime.datetime(year, month, day, hour, minutes, seconds,
                                     microseconds)
        except ValueError as exception:
            raise errors.TimestampError(
                (u'Unable to create timestamp from {0:04d}-{1:02d}-{2:02d} '
                 u'{3:02d}:{4:02d}:{5:02d}.{6:06d} with error: {7:s}').format(
                     year, month, day, hour, minutes, seconds, microseconds,
                     exception))

        if isinstance(timezone, py2to3.STRING_TYPES):
            timezone = pytz.timezone(timezone)

        date_use = timezone.localize(date)
        posix_time = int(calendar.timegm(date_use.utctimetuple()))

        return cls.FromPosixTime(posix_time) + microseconds

    @classmethod
    def FromTimeString(cls,
                       time_string,
                       dayfirst=False,
                       gmt_as_timezone=True,
                       timezone=pytz.UTC):
        """Converts a string containing a date and time value into a timestamp.

    Args:
      time_string: String that contains a date and time value.
      dayfirst: An optional boolean argument. If set to True the parser will
                change the precedence in which it parses timestamps from
                MM-DD-YYYY to DD-MM-YYYY (and YYYY-MM-DD will be interpreted
                as YYYY-DD-MM, etc.).
      gmt_as_timezone: An optional boolean argument. By default (True) the
                       dateutil parser is allowed to interpret GMT differently
                       than UTC. If set to False a trailing 'GMT' is treated
                       as 'UTC'.
      timezone: Optional timezone object (instance of pytz.timezone) that
                the data and time value in the string represents. This value
                is used when the timezone cannot be determined from the string.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.

    Raises:
      TimestampError: if the time string could not be parsed.
    """
        if not gmt_as_timezone and time_string.endswith(' GMT'):
            time_string = u'{0:s}UTC'.format(time_string[:-3])

        try:
            # TODO: deprecate the use of dateutil parser.
            datetime_object = dateutil.parser.parse(time_string,
                                                    dayfirst=dayfirst)

        except (TypeError, ValueError) as exception:
            raise errors.TimestampError((
                u'Unable to convert time string: {0:s} into a datetime object '
                u'with error: {1:s}').format(time_string, exception))

        if datetime_object.tzinfo:
            datetime_object = datetime_object.astimezone(pytz.UTC)
        else:
            datetime_object = timezone.localize(datetime_object)

        return cls.FromPythonDatetime(datetime_object)

    @classmethod
    def FromUUIDTime(cls, uuid_time):
        """Converts a UUID verion 1 time into a timestamp.

    The UUID version 1 time is a 60-bit value containing:
      the number of 100 nanosecond intervals since 1582-10-15 00:00:00

    Args:
      uuid_time: The 60-bit UUID version 1 timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        # TODO: add handling for the case where the timestamp equals zero.
        if uuid_time < 0:
            return 0
        timestamp = (uuid_time - cls.UUID_TIME_TO_POSIX_BASE) / 10

        if timestamp > cls.TIMESTAMP_MAX_MICRO_SECONDS:
            return 0
        return timestamp

    @classmethod
    def FromWebKitTime(cls, webkit_time):
        """Converts a WebKit time into a timestamp.

    The WebKit time is a 64-bit value containing:
      micro seconds since 1601-01-01 00:00:00

    Args:
      webkit_time: The 64-bit WebKit time timestamp.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        if webkit_time < (cls.TIMESTAMP_MIN_MICRO_SECONDS +
                          cls.WEBKIT_TIME_TO_POSIX_BASE):
            return 0
        return webkit_time - cls.WEBKIT_TIME_TO_POSIX_BASE
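
    # Illustrative only: FromWebKitTime(cls.WEBKIT_TIME_TO_POSIX_BASE) returns
    # 0 and FromWebKitTime(cls.WEBKIT_TIME_TO_POSIX_BASE + 1000000) returns
    # 1000000.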

    @classmethod
    def GetNow(cls):
        """Retrieves the current time (now) as a timestamp in UTC.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC.
    """
        time_elements = time.gmtime()
        return calendar.timegm(time_elements) * 1000000

    @classmethod
    def IsLeapYear(cls, year):
        """Determines if a year is a leap year.

    A year is a leap year if it is divisible by 4 and not by 100, or if it
    is divisible by 400.

    Args:
      year: The year as in 1970.

    Returns:
      A boolean value indicating the year is a leap year.
    """
        return (year % 4 == 0 and year % 100 != 0) or year % 400 == 0
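
    # Illustrative only: IsLeapYear(2012) and IsLeapYear(2000) are True,
    # IsLeapYear(1900) is False.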

    @classmethod
    def LocaltimeToUTC(cls, timestamp, timezone, is_dst=False):
        """Converts the timestamp in localtime of the timezone to UTC.

    Args:
      timestamp: The timestamp which is an integer containing the number
                 of micro seconds since January 1, 1970, 00:00:00 UTC.
      timezone: The timezone (pytz.timezone) object.
      is_dst: A boolean to indicate the timestamp is corrected for daylight
              savings time (DST) only used for the DST transition period.

    Returns:
      The timestamp which is an integer containing the number of micro seconds
      since January 1, 1970, 00:00:00 UTC or 0 on error.
    """
        if timezone and timezone != pytz.UTC:
            datetime_object = (
                datetime.datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=None) +
                datetime.timedelta(microseconds=timestamp))

            # Check if timezone is UTC since utcoffset() does not support is_dst
            # for UTC and will raise.
            datetime_delta = timezone.utcoffset(datetime_object, is_dst=is_dst)
            seconds_delta = int(datetime_delta.total_seconds())
            timestamp -= seconds_delta * cls.MICRO_SECONDS_PER_SECOND

        return timestamp

    @classmethod
    def RoundToSeconds(cls, timestamp):
        """Takes a timestamp value and rounds it to a second precision."""
        leftovers = timestamp % cls.MICRO_SECONDS_PER_SECOND
        scrubbed = timestamp - leftovers
        rounded = round(float(leftovers) / cls.MICRO_SECONDS_PER_SECOND)

        return int(scrubbed + rounded * cls.MICRO_SECONDS_PER_SECOND)
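
# A minimal usage sketch of the Timestamp class above; the values are
# illustrative only and assume the module level imports used by this example
# (calendar, datetime, pytz, construct, dateutil) are available.
assert Timestamp.FromPosixTime(1) == 1000000
assert Timestamp.FromJavaTime(1000) == 1000000
assert Timestamp.RoundToSeconds(1600000) == 2000000
assert Timestamp.CopyToIsoFormat(0) == u'1970-01-01T00:00:00+00:00'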
Exemplo n.º 30
0
class NetworksPlugin(interface.WindowsRegistryPlugin):
    """Windows Registry plugin for parsing the NetworkList key."""

    NAME = 'networks'
    DESCRIPTION = 'Parser for NetworkList data.'

    FILTERS = frozenset([
        interface.WindowsRegistryKeyPathFilter(
            'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion'
            '\\NetworkList')
    ])

    _CONNECTION_TYPE = {0x06: 'Wired', 0x17: '3g', 0x47: 'Wireless'}

    _EMPTY_SYSTEM_TIME_TUPLE = (0, 0, 0, 0, 0, 0, 0, 0)

    _SYSTEMTIME_STRUCT = construct.Struct('systemtime',
                                          construct.ULInt16('year'),
                                          construct.ULInt16('month'),
                                          construct.ULInt16('day_of_week'),
                                          construct.ULInt16('day_of_month'),
                                          construct.ULInt16('hours'),
                                          construct.ULInt16('minutes'),
                                          construct.ULInt16('seconds'),
                                          construct.ULInt16('milliseconds'))

    def _GetNetworkInfo(self, signatures_key):
        """Retrieves the network info within the signatures subkey.

    Args:
      signatures_key (dfwinreg.WinRegistryKey): a Windows Registry key.

    Returns:
      A dictionary of (default_gateway_mac, dns_suffix) tuples keyed by
      profile GUID.
    """
        network_info = {}
        for category in signatures_key.GetSubkeys():
            for signature in category.GetSubkeys():
                profile_guid_value = signature.GetValueByName('ProfileGuid')
                if profile_guid_value:
                    profile_guid = profile_guid_value.GetDataAsObject()
                else:
                    continue

                default_gateway_mac_value = signature.GetValueByName(
                    'DefaultGatewayMac')
                if default_gateway_mac_value:
                    default_gateway_mac = default_gateway_mac_value.GetDataAsObject(
                    )
                    default_gateway_mac = ':'.join(
                        map(binascii.hexlify, default_gateway_mac))
                else:
                    default_gateway_mac = None

                dns_suffix_value = signature.GetValueByName('DnsSuffix')
                if dns_suffix_value:
                    dns_suffix = dns_suffix_value.GetDataAsObject()
                else:
                    dns_suffix = None

                network_info[profile_guid] = (default_gateway_mac, dns_suffix)

        return network_info

    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        network_info = {}
        signatures = registry_key.GetSubkeyByName('Signatures')
        if signatures:
            network_info = self._GetNetworkInfo(signatures)

        profiles = registry_key.GetSubkeyByName('Profiles')
        if not profiles:
            return

        for subkey in profiles.GetSubkeys():
            default_gateway_mac, dns_suffix = network_info.get(
                subkey.name, (None, None))

            event_data = WindowsRegistryNetworkEventData()
            event_data.default_gateway_mac = default_gateway_mac
            event_data.dns_suffix = dns_suffix

            ssid_value = subkey.GetValueByName('ProfileName')
            if ssid_value:
                event_data.ssid = ssid_value.GetDataAsObject()

            description_value = subkey.GetValueByName('Description')
            if description_value:
                event_data.description = description_value.GetDataAsObject()

            connection_type_value = subkey.GetValueByName('NameType')
            if connection_type_value:
                connection_type = connection_type_value.GetDataAsObject()
                # TODO: move to formatter.
                connection_type = self._CONNECTION_TYPE.get(
                    connection_type, 'unknown')
                event_data.connection_type = connection_type

            date_created_value = subkey.GetValueByName('DateCreated')
            if date_created_value:
                try:
                    systemtime_struct = self._SYSTEMTIME_STRUCT.parse(
                        date_created_value.data)
                except construct.ConstructError as exception:
                    systemtime_struct = None
                    parser_mediator.ProduceExtractionError(
                        'unable to parse date created with error: {0!s}'.
                        format(exception))

                system_time_tuple = self._EMPTY_SYSTEM_TIME_TUPLE
                if systemtime_struct:
                    system_time_tuple = (systemtime_struct.year,
                                         systemtime_struct.month,
                                         systemtime_struct.day_of_week,
                                         systemtime_struct.day_of_month,
                                         systemtime_struct.hours,
                                         systemtime_struct.minutes,
                                         systemtime_struct.seconds,
                                         systemtime_struct.milliseconds)

                date_time = None
                if system_time_tuple != self._EMPTY_SYSTEM_TIME_TUPLE:
                    try:
                        date_time = dfdatetime_systemtime.Systemtime(
                            system_time_tuple=system_time_tuple)
                    except ValueError:
                        parser_mediator.ProduceExtractionError(
                            'invalid system time: {0!s}'.format(
                                system_time_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_CREATION)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            date_last_connected_value = subkey.GetValueByName(
                'DateLastConnected')
            if date_last_connected_value:
                try:
                    systemtime_struct = self._SYSTEMTIME_STRUCT.parse(
                        date_last_connected_value.data)
                except construct.ConstructError as exception:
                    systemtime_struct = None
                    parser_mediator.ProduceExtractionError(
                        'unable to parse date last connected with error: {0!s}'
                        .format(exception))

                system_time_tuple = self._EMPTY_SYSTEM_TIME_TUPLE
                if systemtime_struct:
                    system_time_tuple = (systemtime_struct.year,
                                         systemtime_struct.month,
                                         systemtime_struct.day_of_week,
                                         systemtime_struct.day_of_month,
                                         systemtime_struct.hours,
                                         systemtime_struct.minutes,
                                         systemtime_struct.seconds,
                                         systemtime_struct.milliseconds)

                date_time = None
                if system_time_tuple != self._EMPTY_SYSTEM_TIME_TUPLE:
                    try:
                        date_time = dfdatetime_systemtime.Systemtime(
                            system_time_tuple=system_time_tuple)
                    except ValueError:
                        parser_mediator.ProduceExtractionError(
                            'invalid system time: {0!s}'.format(
                                system_time_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
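
# A minimal sketch (not part of the plugin above) of the DefaultGatewayMac
# formatting used in _GetNetworkInfo, assuming Python 2 byte strings and a
# hypothetical 6 byte MAC address value.
import binascii

default_gateway_mac = '\x00\x1a\x2b\x3c\x4d\x5e'
print(':'.join(map(binascii.hexlify, default_gateway_mac)))  # 00:1a:2b:3c:4d:5e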