コード例 #1
0
  def _GetLatestYearFromFileEntry(self):
    """Retrieves the maximum (highest value) year from the file entry.

    This function uses the modification time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None if it cannot be determined.
    """
    file_entry = self.GetFileEntry()
    if not file_entry:
      return None

    stat_object = file_entry.GetStat()

    # Prefer the modification time, fall back to the metadata change time.
    posix_time = getattr(stat_object, 'mtime', None)
    if posix_time is None:
      posix_time = getattr(stat_object, 'ctime', None)

    if posix_time is None:
      logger.warning(
          'Unable to determine modification year from file stat information.')
      return None

    try:
      year = timelib.GetYearFromPosixTime(
          posix_time, timezone=self._knowledge_base.timezone)
      return year
    except ValueError as exception:
      # Fixed message: this method determines the modification year, not the
      # creation year (message was copied from the earliest-year method).
      logger.error((
          'Unable to determine modification year from file stat '
          'information with error: {0!s}').format(exception))
      return None
コード例 #2
0
def PyParseIntCast(string, location, tokens):
    """Return an integer from a string.

  This is a pyparsing callback method that converts the matched
  string into an integer.

  The method modifies the content of the tokens list and converts
  them all to an integer value.

  Args:
    string (str): original string.
    location (int): location in the string where the match was made.
    tokens (list[str]): extracted tokens, where the string to be converted
        is stored.
  """
    # Cast the regular (list style) tokens in place.
    for index, token in enumerate(tokens):
        try:
            tokens[index] = int(token)
        except ValueError:
            logger.error(
                'Unable to cast [{0:s}] to an int, setting to 0'.format(token))
            tokens[index] = 0

    # We also need to cast the dictionary built tokens.
    for key in tokens.keys():
        try:
            tokens[key] = int(tokens[key], 10)
        except ValueError:
            # Use !s: the value is still a string here (the cast just failed),
            # so the previous {1:d} format specifier raised instead of logging.
            logger.error(
                'Unable to cast [{0:s} = {1!s}] to an int, setting to 0'.format(
                    key, tokens[key]))
            tokens[key] = 0
コード例 #3
0
ファイル: text_parser.py プロジェクト: aguilajesus/plaso
def PyParseIntCast(string, location, tokens):
  """Return an integer from a string.

  This is a pyparsing callback method that converts the matched
  string into an integer.

  The method modifies the content of the tokens list and converts
  them all to an integer value.

  Args:
    string (str): original string.
    location (int): location in the string where the match was made.
    tokens (list[str]): extracted tokens, where the string to be converted
        is stored.
  """
  # Cast the regular (list style) tokens in place.
  for index, token in enumerate(tokens):
    try:
      tokens[index] = int(token)
    except ValueError:
      logger.error('Unable to cast [{0:s}] to an int, setting to 0'.format(
          token))
      tokens[index] = 0

  # We also need to cast the dictionary built tokens.
  for key in tokens.keys():
    try:
      tokens[key] = int(tokens[key], 10)
    except ValueError:
      # Use !s: the value is still a string here (the cast just failed),
      # so the previous {1:d} format specifier raised instead of logging.
      logger.error(
          'Unable to cast [{0:s} = {1!s}] to an int, setting to 0'.format(
              key, tokens[key]))
      tokens[key] = 0
コード例 #4
0
ファイル: mediator.py プロジェクト: log2timeline/plaso
  def _GetLatestYearFromFileEntry(self):
    """Retrieves the maximum (highest value) year from the file entry.

    This function uses the modification time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None if it cannot be determined.
    """
    file_entry = self.GetFileEntry()
    if not file_entry:
      return None

    stat_object = file_entry.GetStat()

    # Prefer the modification time, fall back to the metadata change time.
    posix_time = getattr(stat_object, 'mtime', None)
    if posix_time is None:
      posix_time = getattr(stat_object, 'ctime', None)

    if posix_time is None:
      logger.warning(
          'Unable to determine modification year from file stat information.')
      return None

    try:
      year = timelib.GetYearFromPosixTime(
          posix_time, timezone=self._knowledge_base.timezone)
      return year
    except ValueError as exception:
      # Fixed message: this method determines the modification year, not the
      # creation year (message was copied from the earliest-year method).
      logger.error((
          'Unable to determine modification year from file stat '
          'information with error: {0!s}').format(exception))
      return None
コード例 #5
0
    def _IsText(self, bytes_in, encoding=None):
        """Determines whether a byte sequence looks like text.

    Parsers need quick and at least semi reliable method of discovering whether
    or not a particular byte stream is text or resembles text or not. This can
    be used in text parsers to determine if a file is a text file or not for
    instance.

    The method assumes the byte sequence is either ASCII, UTF-8, UTF-16 or
    method supplied character encoding. Otherwise it will make the assumption
    the byte sequence is not text, but a byte sequence.

    Args:
      bytes_in (bytes|str): byte stream to examine.
      encoding (Optional[str]): encoding to test, if not defined ASCII and
          UTF-8 are tried.

    Returns:
      bool: True if the bytes stream contains text.
    """
        # TODO: Improve speed and accuracy of this method.
        # A native Unicode string is text by definition.
        if isinstance(bytes_in, py2to3.UNICODE_TYPE):
            return True

        # Consider the sequence text while every byte is printable ASCII.
        all_printable_ascii = True
        for byte_value in bytes_in:
            if py2to3.PY_2:
                byte_value = ord(byte_value)
            if byte_value <= 31 or byte_value >= 128:
                all_printable_ascii = False
                break

        if all_printable_ascii:
            return True

        # Not plain ASCII: see if the sequence decodes as UTF-8.
        try:
            bytes_in.decode('utf-8')
            return True
        except UnicodeDecodeError:
            pass

        # Finally try the caller supplied encoding, if any.
        if encoding:
            try:
                bytes_in.decode(encoding)
                return True
            except LookupError:
                logger.error('Unsupported encoding: {0:s}'.format(encoding))
            except UnicodeDecodeError:
                pass

        return False
コード例 #6
0
ファイル: text_parser.py プロジェクト: aguilajesus/plaso
  def _IsText(self, bytes_in, encoding=None):
    """Determines whether a byte sequence looks like text.

    Parsers need quick and at least semi reliable method of discovering whether
    or not a particular byte stream is text or resembles text or not. This can
    be used in text parsers to determine if a file is a text file or not for
    instance.

    The method assumes the byte sequence is either ASCII, UTF-8, UTF-16 or
    method supplied character encoding. Otherwise it will make the assumption
    the byte sequence is not text, but a byte sequence.

    Args:
      bytes_in (bytes|str): byte stream to examine.
      encoding (Optional[str]): encoding to test, if not defined ASCII and
          UTF-8 are tried.

    Returns:
      bool: True if the bytes stream contains text.
    """
    # TODO: Improve speed and accuracy of this method.
    # A native Unicode string is text by definition.
    if isinstance(bytes_in, py2to3.UNICODE_TYPE):
      return True

    # On Python 2 iterating a byte string yields characters, so convert
    # to ordinal values first; on Python 3 iteration yields integers.
    if py2to3.PY_2:
      byte_values = [ord(character) for character in bytes_in]
    else:
      byte_values = bytes_in

    # Consider the sequence text when every byte is printable ASCII.
    if all(31 < byte_value < 128 for byte_value in byte_values):
      return True

    # Otherwise try UTF-8 followed by the caller supplied encoding, if any.
    for codec in filter(None, ('utf-8', encoding)):
      try:
        bytes_in.decode(codec)
        return True
      except LookupError:
        logger.error('Unsupported encoding: {0:s}'.format(codec))
      except UnicodeDecodeError:
        pass

    return False
コード例 #7
0
ファイル: mediator.py プロジェクト: puccia/plaso
    def _GetEarliestYearFromFileEntry(self):
        """Retrieves the year from the file entry date and time values.

        This function uses the creation time if available otherwise the change
        time (metadata last modification time) is used.

        Returns:
          int: year of the file entry or None.
        """
        file_entry = self.GetFileEntry()
        if not file_entry:
            return None

        stat_object = file_entry.GetStat()

        # Prefer the creation time, fall back to the metadata change time.
        posix_time = getattr(stat_object, 'crtime', None)
        if posix_time is None:
            posix_time = getattr(stat_object, 'ctime', None)

        # Gzip files don't store the creation or metadata modification times,
        # but the modification time stored in the file is a good proxy.
        if file_entry.TYPE_INDICATOR == dfvfs_definitions.TYPE_INDICATOR_GZIP:
            posix_time = getattr(stat_object, 'mtime', None)

        if posix_time is None:
            logger.warning(
                'Unable to determine earliest year from file stat information.'
            )
            return None

        try:
            year = timelib.GetYearFromPosixTime(
                posix_time, timezone=self._knowledge_base.timezone)
            return year
        except ValueError as exception:
            # Use !s: formatting an exception object with the {0:s} string
            # format specifier raises TypeError, masking the original error.
            logger.error((
                'Unable to determine earliest year from file stat information with '
                'error: {0!s}').format(exception))
            return None
コード例 #8
0
ファイル: mediator.py プロジェクト: log2timeline/plaso
  def _GetEarliestYearFromFileEntry(self):
    """Determines the earliest year of the file entry.

    The creation time is preferred; when it is not available the change
    time (metadata last modification time) is used instead.

    Returns:
      int: year of the file entry or None.
    """
    file_entry = self.GetFileEntry()
    if not file_entry:
      return None

    stat_object = file_entry.GetStat()

    timestamp = getattr(stat_object, 'crtime', None)
    if timestamp is None:
      timestamp = getattr(stat_object, 'ctime', None)

    # Gzip files don't store the creation or metadata modification times,
    # but the modification time stored in the file is a good proxy.
    if file_entry.TYPE_INDICATOR == dfvfs_definitions.TYPE_INDICATOR_GZIP:
      timestamp = getattr(stat_object, 'mtime', None)

    if timestamp is None:
      logger.warning(
          'Unable to determine earliest year from file stat information.')
      return None

    try:
      return timelib.GetYearFromPosixTime(
          timestamp, timezone=self._knowledge_base.timezone)
    except ValueError as exception:
      logger.error((
          'Unable to determine earliest year from file stat information with '
          'error: {0!s}').format(exception))
      return None
コード例 #9
0
  def _ParsePlist(self, parser_mediator, match=None, **unused_kwargs):
    """Extracts relevant user timestamp entries.

    Produces events for the password last set, last login and failed login
    timestamps found in the password policy options of a user account plist.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
    """
    if 'name' not in match or 'uid' not in match:
      return

    account = match['name'][0]
    uid = match['uid'][0]

    for policy in match.get('passwordpolicyoptions', []):
      try:
        xml_policy = ElementTree.fromstring(policy)
      except (LookupError, ElementTree.ParseError,
              expat.ExpatError) as exception:
        logger.error((
            'Unable to parse XML structure for an user policy, account: '
            '{0:s} and uid: {1!s}, with error: {2!s}').format(
                account, uid, exception))
        continue

      # Reset per policy so a policy without any "dict" elements does not
      # raise NameError or silently reuse the dictionary of a prior policy.
      policy_dict = {}
      for dict_elements in xml_policy.iterfind('dict'):
        key_values = [value.text for value in dict_elements]
        # Taking a list and converting it to a dict, using every other item
        # as the key and the other one as the value.
        policy_dict = dict(zip(key_values[0::2], key_values[1::2]))

      # The Cocoa epoch (2001-01-01) indicates "not set" and is skipped.
      time_string = policy_dict.get('passwordLastSetTime', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse password last set time string: {0:s}'.format(
                  time_string))

        shadow_hash_data = match.get('ShadowHashData', None)
        if date_time and isinstance(shadow_hash_data, (list, tuple)):
          # Extract the hash password information, which is stored in
          # the attribute ShadowHashData which is a binary plist data.
          try:
            property_list = plistlib.loads(shadow_hash_data[0])
          except plistlib.InvalidFileException as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to parse ShadowHashData with error: {0!s}'.format(
                    exception))
            property_list = {}

          password_hash = 'N/A'

          salted_hash = property_list.get('SALTED-SHA512-PBKDF2', None)
          if salted_hash:
            # Re-encode the raw salt and entropy bytes as hexadecimal text.
            salt_hex_bytes = codecs.encode(salted_hash['salt'], 'hex')
            salt_string = codecs.decode(salt_hex_bytes, 'ascii')
            entropy_hex_bytes = codecs.encode(salted_hash['entropy'], 'hex')
            entropy_string = codecs.decode(entropy_hex_bytes, 'ascii')
            password_hash = '$ml${0:d}${1:s}${2:s}'.format(
                salted_hash['iterations'], salt_string, entropy_string)

          event_data = plist_event.PlistTimeEventData()
          event_data.desc = (
              'Last time {0:s} ({1!s}) changed the password: {2!s}').format(
                  account, uid, password_hash)
          event_data.key = 'passwordLastSetTime'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      time_string = policy_dict.get('lastLoginTimestamp', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse last login time string: {0:s}'.format(
                  time_string))

        if date_time:
          event_data = plist_event.PlistTimeEventData()
          event_data.desc = 'Last login from {0:s} ({1!s})'.format(
              account, uid)
          event_data.key = 'lastLoginTimestamp'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      time_string = policy_dict.get('failedLoginTimestamp', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse failed login time string: {0:s}'.format(
                  time_string))

        if date_time:
          event_data = plist_event.PlistTimeEventData()
          event_data.desc = (
              'Last failed login from {0:s} ({1!s}) ({2!s} times)').format(
                  account, uid, policy_dict.get('failedLoginCount', 0))
          event_data.key = 'failedLoginTimestamp'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)
コード例 #10
0
ファイル: interface.py プロジェクト: NicoLuuu/plaso
  def _GetRecordValues(
      self, parser_mediator, table_name, record, value_mappings=None):
    """Retrieves the values from the record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table.
      record (pyesedb.record): ESE record.
      value_mappings (Optional[dict[str, str]]): value mappings, which map
          the column name to a callback method.

    Returns:
      dict[str,object]: values per column name.
    """
    record_values = {}

    for value_entry in range(0, record.number_of_values):
      # Honor an abort request from the mediator mid-record.
      if parser_mediator.abort:
        break

      column_name = record.get_column_name(value_entry)
      # Keep the first occurrence of a duplicate column; later ones are
      # logged and skipped.
      if column_name in record_values:
        logger.warning(
            '[{0:s}] duplicate column: {1:s} in table: {2:s}'.format(
                self.NAME, column_name, table_name))
        continue

      value_callback = None
      if value_mappings and column_name in value_mappings:
        # The mapping names a method on self; when the method is missing a
        # warning is logged and the default extraction below is used instead.
        value_callback_method = value_mappings.get(column_name)
        if value_callback_method:
          value_callback = getattr(self, value_callback_method, None)
          if value_callback is None:
            logger.warning((
                '[{0:s}] missing value callback method: {1:s} for column: '
                '{2:s} in table: {3:s}').format(
                    self.NAME, value_callback_method, column_name, table_name))

      if value_callback:
        try:
          value_data = record.get_value_data(value_entry)
          value = value_callback(value_data)

        except Exception as exception:  # pylint: disable=broad-except
          # A failing callback produces an extraction error and a None value
          # instead of aborting the whole record.
          logger.error(exception)
          value = None
          parser_mediator.ProduceExtractionError((
              'unable to parse value: {0:s} with callback: {1:s} with error: '
              '{2!s}').format(column_name, value_callback_method, exception))

      else:
        try:
          value = self._GetRecordValue(record, value_entry)
        except ValueError as exception:
          value = None
          parser_mediator.ProduceExtractionError(
              'unable to parse value: {0:s} with error: {1!s}'.format(
                  column_name, exception))

      record_values[column_name] = value

    return record_values
コード例 #11
0
    def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
        """Extracts relevant user timestamp entries.

        Produces events for the password last set, last login and failed
        login timestamps found in a user account plist.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
        """
        if 'name' not in match or 'uid' not in match:
            return

        account = match['name'][0]
        uid = match['uid'][0]

        for policy in match.get('passwordpolicyoptions', []):
            try:
                xml_policy = ElementTree.fromstring(policy)
            except (ElementTree.ParseError, LookupError) as exception:
                logger.error((
                    'Unable to parse XML structure for an user policy, account: '
                    '{0:s} and uid: {1!s}, with error: {2!s}').format(
                        account, uid, exception))
                continue

            # NOTE(review): policy_dict is only bound inside this loop; a
            # policy without "dict" elements would raise NameError below or
            # reuse the dictionary of a previous policy — confirm.
            for dict_elements in xml_policy.iterfind('dict'):
                key_values = [value.text for value in iter(dict_elements)]
                # Taking a list and converting it to a dict, using every other item
                # as the key and the other one as the value.
                policy_dict = dict(zip(key_values[0::2], key_values[1::2]))

            # The Cocoa epoch (2001-01-01) indicates "not set" and is skipped.
            time_string = policy_dict.get('passwordLastSetTime', None)
            if time_string and time_string != '2001-01-01T00:00:00Z':
                try:
                    date_time = dfdatetime_time_elements.TimeElements()
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse password last set time string: {0:s}'.
                        format(time_string))

                shadow_hash_data = match.get('ShadowHashData', None)
                if date_time and isinstance(shadow_hash_data, (list, tuple)):
                    # Extract the hash password information.
                    # It is stored in the attribute ShadowHashData which is
                    # binary plist data; however biplist only extracts one
                    # level of binary plist, then it returns this information
                    # as a string.

                    # TODO: change this into a DataRange instead. For this we
                    # need the file offset and size of the ShadowHashData value data.
                    shadow_hash_data = shadow_hash_data[0]

                    # Wrap the raw data in a fake dfVFS file so biplist can
                    # read it through a file-like object.
                    resolver_context = context.Context()
                    fake_file = fake_file_io.FakeFile(resolver_context,
                                                      shadow_hash_data)
                    shadow_hash_data_path_spec = fake_path_spec.FakePathSpec(
                        location='ShadowHashData')
                    fake_file.open(path_spec=shadow_hash_data_path_spec)

                    try:
                        plist_file = biplist.readPlist(fake_file)
                    except biplist.InvalidPlistException:
                        plist_file = {}
                    salted_hash = plist_file.get('SALTED-SHA512-PBKDF2', None)
                    if salted_hash:
                        # Re-encode the raw salt and entropy bytes as
                        # hexadecimal text for the $ml$ hash representation.
                        salt_hex_bytes = codecs.encode(salted_hash['salt'],
                                                       'hex')
                        salt_string = codecs.decode(salt_hex_bytes, 'ascii')
                        entropy_hex_bytes = codecs.encode(
                            salted_hash['entropy'], 'hex')
                        entropy_string = codecs.decode(entropy_hex_bytes,
                                                       'ascii')
                        password_hash = '$ml${0:d}${1:s}${2:s}'.format(
                            salted_hash['iterations'], salt_string,
                            entropy_string)
                    else:
                        password_hash = 'N/A'

                    event_data = plist_event.PlistTimeEventData()
                    event_data.desc = (
                        'Last time {0:s} ({1!s}) changed the password: {2!s}'
                    ).format(account, uid, password_hash)
                    event_data.key = 'passwordLastSetTime'
                    event_data.root = self._ROOT

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_string = policy_dict.get('lastLoginTimestamp', None)
            if time_string and time_string != '2001-01-01T00:00:00Z':
                try:
                    date_time = dfdatetime_time_elements.TimeElements()
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse last login time string: {0:s}'.format(
                            time_string))

                if date_time:
                    event_data = plist_event.PlistTimeEventData()
                    event_data.desc = 'Last login from {0:s} ({1!s})'.format(
                        account, uid)
                    event_data.key = 'lastLoginTimestamp'
                    event_data.root = self._ROOT

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_string = policy_dict.get('failedLoginTimestamp', None)
            if time_string and time_string != '2001-01-01T00:00:00Z':
                try:
                    date_time = dfdatetime_time_elements.TimeElements()
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse failed login time string: {0:s}'.
                        format(time_string))

                if date_time:
                    event_data = plist_event.PlistTimeEventData()
                    event_data.desc = (
                        'Last failed login from {0:s} ({1!s}) ({2!s} times)'
                    ).format(account, uid,
                             policy_dict.get('failedLoginCount', 0))
                    event_data.key = 'failedLoginTimestamp'
                    event_data.root = self._ROOT

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
コード例 #12
0
    def _DetermineCacheEntrySize(self, format_type, value_data,
                                 cached_entry_offset):
        """Determines the size of a cached entry.

        Args:
          format_type (int): format type.
          value_data (bytes): value data.
          cached_entry_offset (int): offset of the first cached entry data
              relative to the start of the value data.

        Returns:
          int: cached entry size or None if not cached.

        Raises:
          RuntimeError: if the format type is not supported.
        """
        if format_type not in (self._FORMAT_TYPE_XP, self._FORMAT_TYPE_2003,
                               self._FORMAT_TYPE_VISTA, self._FORMAT_TYPE_7,
                               self._FORMAT_TYPE_8, self._FORMAT_TYPE_10):
            raise RuntimeError('[{0:s}] Unsupported format type: {1:d}'.format(
                self.NAME, format_type))

        cached_entry_data = value_data[cached_entry_offset:]
        cached_entry_size = 0

        # XP uses a single fixed-size 32-bit entry structure.
        if format_type == self._FORMAT_TYPE_XP:
            cached_entry_size = self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof()

        elif format_type in (self._FORMAT_TYPE_2003, self._FORMAT_TYPE_VISTA,
                             self._FORMAT_TYPE_7):
            # Parse the leading fields of the entry to sanity check the path
            # sizes and decide between the 32-bit and 64-bit layouts.
            path_size = construct.ULInt16('path_size').parse(
                cached_entry_data[0:2])
            maximum_path_size = construct.ULInt16('maximum_path_size').parse(
                cached_entry_data[2:4])
            path_offset_32bit = construct.ULInt32('path_offset').parse(
                cached_entry_data[4:8])
            # NOTE(review): the 8-byte slice is parsed with a 32-bit type so
            # only the low 32 bits are examined; the value is only compared
            # against 0 below — confirm ULInt64 was not intended here.
            path_offset_64bit = construct.ULInt32('path_offset').parse(
                cached_entry_data[8:16])

            if maximum_path_size < path_size:
                logger.error('[{0:s}] Path size value out of bounds.'.format(
                    self.NAME))
                return None

            # A valid path has a non-zero size and a 2-byte end-of-string
            # (UTF-16 NUL terminator) gap between size and maximum size.
            path_end_of_string_size = maximum_path_size - path_size
            if path_size == 0 or path_end_of_string_size != 2:
                logger.error('[{0:s}] Unsupported path size values.'.format(
                    self.NAME))
                return None

            # Assume the entry is 64-bit if the 32-bit path offset is 0 and
            # the 64-bit path offset is set.
            if path_offset_32bit == 0 and path_offset_64bit != 0:
                if format_type == self._FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self._FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
                    )
                elif format_type == self._FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
                    )

            else:
                if format_type == self._FORMAT_TYPE_2003:
                    cached_entry_size = self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self._FORMAT_TYPE_VISTA:
                    cached_entry_size = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
                    )
                elif format_type == self._FORMAT_TYPE_7:
                    cached_entry_size = self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof(
                    )

        # Windows 8 and 10 share a common entry header structure.
        elif format_type in (self._FORMAT_TYPE_8, self._FORMAT_TYPE_10):
            cached_entry_size = self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof()

        return cached_entry_size
コード例 #13
0
ファイル: macuser.py プロジェクト: aguilajesus/plaso
  def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
    """Extracts relevant user timestamp entries.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
    """
    if 'name' not in match or 'uid' not in match:
      return

    account = match['name'][0]
    uid = match['uid'][0]

    for policy in match.get('passwordpolicyoptions', []):
      try:
        xml_policy = ElementTree.fromstring(policy)
      except (ElementTree.ParseError, LookupError) as exception:
        logger.error((
            'Unable to parse XML structure for an user policy, account: '
            '{0:s} and uid: {1!s}, with error: {2!s}').format(
                account, uid, exception))
        continue

      for dict_elements in xml_policy.iterfind('dict'):
        key_values = [value.text for value in iter(dict_elements)]
        # Taking a list and converting it to a dict, using every other item
        # as the key and the other one as the value.
        policy_dict = dict(zip(key_values[0::2], key_values[1::2]))

      time_string = policy_dict.get('passwordLastSetTime', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionError(
              'unable to parse password last set time string: {0:s}'.format(
                  time_string))

        shadow_hash_data = match.get('ShadowHashData', None)
        if date_time and isinstance(shadow_hash_data, (list, tuple)):
          # Extract the hash password information.
          # It is store in the attribute ShadowHasData which is
          # a binary plist data; However biplist only extracts one
          # level of binary plist, then it returns this information
          # as a string.

          # TODO: change this into a DataRange instead. For this we
          # need the file offset and size of the ShadowHashData value data.
          shadow_hash_data = shadow_hash_data[0]

          resolver_context = context.Context()
          fake_file = fake_file_io.FakeFile(
              resolver_context, shadow_hash_data)
          shadow_hash_data_path_spec = fake_path_spec.FakePathSpec(
              location='ShadowHashData')
          fake_file.open(path_spec=shadow_hash_data_path_spec)

          try:
            plist_file = biplist.readPlist(fake_file)
          except biplist.InvalidPlistException:
            plist_file = {}
          salted_hash = plist_file.get('SALTED-SHA512-PBKDF2', None)
          if salted_hash:
            salt_hex_bytes = codecs.encode(salted_hash['salt'], 'hex')
            salt_string = codecs.decode(salt_hex_bytes, 'ascii')
            entropy_hex_bytes = codecs.encode(salted_hash['entropy'], 'hex')
            entropy_string = codecs.decode(entropy_hex_bytes, 'ascii')
            password_hash = '$ml${0:d}${1:s}${2:s}'.format(
                salted_hash['iterations'], salt_string, entropy_string)
          else:
            password_hash = 'N/A'

          event_data = plist_event.PlistTimeEventData()
          event_data.desc = (
              'Last time {0:s} ({1!s}) changed the password: {2!s}').format(
                  account, uid, password_hash)
          event_data.key = 'passwordLastSetTime'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      time_string = policy_dict.get('lastLoginTimestamp', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionError(
              'unable to parse last login time string: {0:s}'.format(
                  time_string))

        if date_time:
          event_data = plist_event.PlistTimeEventData()
          event_data.desc = 'Last login from {0:s} ({1!s})'.format(
              account, uid)
          event_data.key = 'lastLoginTimestamp'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      time_string = policy_dict.get('failedLoginTimestamp', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionError(
              'unable to parse failed login time string: {0:s}'.format(
                  time_string))

        if date_time:
          event_data = plist_event.PlistTimeEventData()
          event_data.desc = (
              'Last failed login from {0:s} ({1!s}) ({2!s} times)').format(
                  account, uid, policy_dict.get('failedLoginCount', 0))
          event_data.key = 'failedLoginTimestamp'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)
Code example #14
0
File: interface.py  Project: log2timeline/plaso
  def _GetRecordValues(
      self, parser_mediator, table_name, record, value_mappings=None):
    """Retrieves the values from the record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table.
      record (pyesedb.record): ESE record.
      value_mappings (Optional[dict[str, str]): value mappings, which map
          the column name to a callback method.

    Returns:
      dict[str,object]: values per column name.
    """
    record_values = {}

    for value_entry in range(0, record.number_of_values):
      if parser_mediator.abort:
        break

      column_name = record.get_column_name(value_entry)
      if column_name in record_values:
        logger.warning(
            '[{0:s}] duplicate column: {1:s} in table: {2:s}'.format(
                self.NAME, column_name, table_name))
        continue

      value_callback = None
      if value_mappings and column_name in value_mappings:
        value_callback_method = value_mappings.get(column_name)
        if value_callback_method:
          value_callback = getattr(self, value_callback_method, None)
          if value_callback is None:
            logger.warning((
                '[{0:s}] missing value callback method: {1:s} for column: '
                '{2:s} in table: {3:s}').format(
                    self.NAME, value_callback_method, column_name, table_name))

      if value_callback:
        try:
          value_data = record.get_value_data(value_entry)
          value = value_callback(value_data)

        except Exception as exception:  # pylint: disable=broad-except
          logger.error(exception)
          value = None
          parser_mediator.ProduceExtractionWarning((
              'unable to parse value: {0:s} with callback: {1:s} with error: '
              '{2!s}').format(column_name, value_callback_method, exception))

      else:
        try:
          value = self._GetRecordValue(record, value_entry)
        except ValueError as exception:
          value = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse value: {0:s} with error: {1!s}'.format(
                  column_name, exception))

      record_values[column_name] = value

    return record_values