Example #1
  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    dynamic_info_size_error_reported = False

    tasks_key = registry_key.GetSubkeyByName('Tasks')
    tree_key = registry_key.GetSubkeyByName('Tree')

    if not tasks_key or not tree_key:
      parser_mediator.ProduceExtractionWarning(
          'Task Cache is missing a Tasks or Tree sub key.')
      return

    task_guids = {}
    for sub_key in tree_key.GetSubkeys():
      for value_key, id_value in self._GetIdValue(sub_key):
        # TODO: improve this check to a regex.
        # The GUID is in the form {%GUID%}, stored as a UTF-16 little-endian
        # string, and should be 78 bytes in size.
        id_value_data_size = len(id_value.data)
        if id_value_data_size != 78:
          parser_mediator.ProduceExtractionWarning(
              'unsupported Id value data size: {0:d}.'.format(
                  id_value_data_size))
          continue

        guid_string = id_value.GetDataAsObject()
        task_guids[guid_string] = value_key.name

    dynamic_info_map = self._GetDataTypeMap('dynamic_info_record')
    dynamic_info2_map = self._GetDataTypeMap('dynamic_info2_record')

    dynamic_info_size = dynamic_info_map.GetByteSize()
    dynamic_info2_size = dynamic_info2_map.GetByteSize()

    for sub_key in tasks_key.GetSubkeys():
      dynamic_info_value = sub_key.GetValueByName('DynamicInfo')
      if not dynamic_info_value:
        continue

      dynamic_info_record_map = None
      dynamic_info_value_data_size = len(dynamic_info_value.data)
      if dynamic_info_value_data_size == dynamic_info_size:
        dynamic_info_record_map = dynamic_info_map
      elif dynamic_info_value_data_size == dynamic_info2_size:
        dynamic_info_record_map = dynamic_info2_map
      else:
        if not dynamic_info_size_error_reported:
          parser_mediator.ProduceExtractionWarning(
              'unsupported DynamicInfo value data size: {0:d}.'.format(
                  dynamic_info_value_data_size))
          dynamic_info_size_error_reported = True
        continue

      try:
        dynamic_info_record = self._ReadStructureFromByteStream(
            dynamic_info_value.data, 0, dynamic_info_record_map)
      except (ValueError, errors.ParseError) as exception:
        parser_mediator.ProduceExtractionWarning(
            'unable to parse DynamicInfo record with error: {0!s}.'.format(
                exception))
        continue

      name = task_guids.get(sub_key.name, sub_key.name)

      event_data = TaskCacheEventData()
      event_data.key_path = registry_key.path
      event_data.task_name = name
      event_data.task_identifier = sub_key.name

      event = time_events.DateTimeValuesEvent(
          registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      last_registered_time = dynamic_info_record.last_registered_time
      if last_registered_time:
        # Note this is likely either the last registered time or
        # the update time.
        date_time = dfdatetime_filetime.Filetime(
            timestamp=last_registered_time)
        event = time_events.DateTimeValuesEvent(
            date_time, 'Last registered time')
        parser_mediator.ProduceEventWithEventData(event, event_data)

      launch_time = dynamic_info_record.launch_time
      if launch_time:
        # Note this is likely the launch time.
        date_time = dfdatetime_filetime.Filetime(timestamp=launch_time)
        event = time_events.DateTimeValuesEvent(
            date_time, 'Launch time')
        parser_mediator.ProduceEventWithEventData(event, event_data)

      unknown_time = getattr(dynamic_info_record, 'unknown_time', None)
      if unknown_time:
        date_time = dfdatetime_filetime.Filetime(timestamp=unknown_time)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_UNKNOWN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
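The TODO above asks for a regex instead of the plain 78-byte size check. A minimal sketch of what such a check could look like, assuming the {GUID} form described in the comment (a hypothetical helper, not part of plaso):

import re

# Matches a braced GUID such as {12345678-1234-1234-1234-123456789012}.
GUID_REGEX = re.compile(
    '^\\{[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-'
    '[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}\\}$')


def IsBracedGuid(guid_string):
  """Returns True if the string looks like a {GUID} value."""
  return bool(GUID_REGEX.match(guid_string))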
Example #2
    def _ParseContainerTable(self, parser_mediator, table, container_name):
        """Parses a Container_# table.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table (pyesedb.table): table.
      container_name (str): container name, which indicates the table type.

    Raises:
      ValueError: if the table value is missing.
    """
        if table is None:
            raise ValueError('Missing table value.')

        for record_index, esedb_record in enumerate(table.records):
            if parser_mediator.abort:
                break

            # TODO: add support for:
            # wpnidm, iecompat, iecompatua, DNTException, DOMStore
            if container_name == 'Content':
                value_mappings = self._CONTAINER_TABLE_VALUE_MAPPINGS
            else:
                value_mappings = None

            try:
                record_values = self._GetRecordValues(
                    parser_mediator,
                    table.name,
                    esedb_record,
                    value_mappings=value_mappings)

            except UnicodeDecodeError:
                parser_mediator.ProduceExtractionWarning(
                    ('Unable to retrieve record values from record: {0:d} '
                     'in table: {1:s}').format(record_index, table.name))
                continue

            if (container_name in self._SUPPORTED_CONTAINER_NAMES
                    or container_name.startswith('MSHist')):
                access_count = record_values.get('AccessCount', None)
                cached_filename = record_values.get('Filename', None)
                cached_file_size = record_values.get('FileSize', None)
                cache_identifier = record_values.get('CacheId', None)
                container_identifier = record_values.get('ContainerId', None)
                entry_identifier = record_values.get('EntryId', None)
                file_extension = record_values.get('FileExtension', None)
                redirect_url = record_values.get('RedirectUrl', None)
                sync_count = record_values.get('SyncCount', None)

                url = record_values.get('Url', '')
                # Ignore an URL that start with a binary value.
                if ord(url[0]) < 0x20 or ord(url[0]) == 0x7f:
                    url = None

                request_headers = record_values.get('RequestHeaders', None)
                # Ignore non-Unicode request headers values.
                if not isinstance(request_headers, str):
                    request_headers = None

                response_headers = record_values.get('ResponseHeaders', None)
                # Ignore non-Unicode response headers values.
                if not isinstance(response_headers, str):
                    response_headers = None

                event_data = MsieWebCacheContainerEventData()
                event_data.access_count = access_count
                event_data.cached_filename = cached_filename
                event_data.cached_file_size = cached_file_size
                event_data.cache_identifier = cache_identifier
                event_data.container_identifier = container_identifier
                event_data.entry_identifier = entry_identifier
                event_data.file_extension = file_extension
                event_data.redirect_url = redirect_url
                event_data.request_headers = request_headers
                event_data.response_headers = response_headers
                event_data.sync_count = sync_count
                event_data.url = url

                timestamp = record_values.get('SyncTime', None)
                if timestamp:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)
                    event = time_events.DateTimeValuesEvent(
                        date_time, 'Synchronization time')
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                timestamp = record_values.get('CreationTime', None)
                if timestamp:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_CREATION)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                timestamp = record_values.get('ExpiryTime', None)
                if timestamp:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                timestamp = record_values.get('ModifiedTime', None)
                if timestamp:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                timestamp = record_values.get('AccessedTime', None)
                if timestamp:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                timestamp = record_values.get('PostCheckTime', None)
                if timestamp:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)
                    event = time_events.DateTimeValuesEvent(
                        date_time, 'Post check time')
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
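All of the WebCache timestamps above are FILETIME values: counts of 100-nanosecond intervals since 1601-01-01. plaso wraps them in dfdatetime_filetime.Filetime; a standalone conversion for sanity-checking raw values looks roughly like this:

import datetime

def filetime_to_datetime(filetime):
  """Converts a FILETIME value to a naive UTC datetime."""
  return (datetime.datetime(1601, 1, 1) +
          datetime.timedelta(microseconds=filetime // 10))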
Example #3
  def ParseFileObject(self, parser_mediator, file_object):
    """Parses a Systemd journal file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the header cannot be parsed.
    """
    file_header_map = self._GetDataTypeMap('systemd_journal_file_header')

    try:
      file_header, _ = self._ReadStructureFromFileObject(
          file_object, 0, file_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse file header with error: {0!s}'.format(
              exception))

    if file_header.signature != self._FILE_SIGNATURE:
      raise errors.UnableToParseFile('Invalid file signature.')

    if file_header.header_size not in self._SUPPORTED_FILE_HEADER_SIZES:
      raise errors.UnableToParseFile(
          'Unsupported file header size: {0:d}.'.format(
              file_header.header_size))

    data_hash_table_end_offset = (
        file_header.data_hash_table_offset +
        file_header.data_hash_table_size)
    field_hash_table_end_offset = (
        file_header.field_hash_table_offset +
        file_header.field_hash_table_size)
    self._maximum_journal_file_offset = max(
        data_hash_table_end_offset, field_hash_table_end_offset)

    entry_object_offsets = self._ParseEntryObjectOffsets(
        file_object, file_header.entry_array_offset)

    for entry_object_offset in entry_object_offsets:
      if entry_object_offset == 0:
        continue

      try:
        fields = self._ParseJournalEntry(file_object, entry_object_offset)
      except errors.ParseError as exception:
        parser_mediator.ProduceExtractionError((
            'Unable to parse journal entry at offset: 0x{0:08x} with '
            'error: {1!s}').format(entry_object_offset, exception))
        return

      event_data = SystemdJournalEventData()

      event_data.body = fields.get('MESSAGE', None)
      event_data.hostname = fields.get('_HOSTNAME', None)
      event_data.reporter = fields.get('SYSLOG_IDENTIFIER', None)

      if event_data.reporter and event_data.reporter != 'kernel':
        event_data.pid = fields.get('_PID', fields.get('SYSLOG_PID', None))

      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=fields['real_time'])
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
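The real_time field used above holds the journal entry's realtime timestamp in microseconds since the POSIX epoch, which is why the code builds a PosixTimeInMicroseconds. As a standalone check:

import datetime

def real_time_to_datetime(real_time):
  """Converts a journal real_time value (microseconds) to UTC."""
  return (datetime.datetime(1970, 1, 1) +
          datetime.timedelta(microseconds=real_time))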
Example #4
    def ParseFileTransfer(self,
                          parser_mediator,
                          query,
                          row,
                          cache=None,
                          database=None,
                          **unused_kwargs):
        """Parses a file transfer.

    There is no direct relationship between who sends the file and
    who accepts the file.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row resulting from query.
      cache (Optional[SQLiteCache]): cache.
      database (Optional[SQLiteDatabase]): database.
    """
        query_hash = hash(query)

        source_dict = cache.GetResults('source')
        if not source_dict:
            results = database.Query(self.QUERY_SOURCE_FROM_TRANSFER)

            cache.CacheQueryResults(results, 'source', 'pk_id',
                                    ('skypeid', 'skypename'))
            source_dict = cache.GetResults('source')

        dest_dict = cache.GetResults('destination')
        if not dest_dict:
            results = database.Query(self.QUERY_DEST_FROM_TRANSFER)

            cache.CacheQueryResults(results, 'destination', 'parent_id',
                                    ('skypeid', 'skypename'))
            dest_dict = cache.GetResults('destination')

        source = 'Unknown'
        destination = 'Unknown'

        parent_id = self._GetRowValue(query_hash, row, 'parent_id')
        partner_dispname = self._GetRowValue(query_hash, row,
                                             'partner_dispname')
        partner_handle = self._GetRowValue(query_hash, row, 'partner_handle')

        if parent_id:
            destination = '{0:s} <{1:s}>'.format(partner_handle,
                                                 partner_dispname)
            skype_id, skype_name = source_dict.get(parent_id, [None, None])
            if skype_name:
                source = '{0:s} <{1:s}>'.format(skype_id, skype_name)
        else:
            source = '{0:s} <{1:s}>'.format(partner_handle, partner_dispname)

            pk_id = self._GetRowValue(query_hash, row, 'pk_id')
            if pk_id:
                skype_id, skype_name = dest_dict.get(pk_id, [None, None])
                if skype_name:
                    destination = '{0:s} <{1:s}>'.format(skype_id, skype_name)

        filename = self._GetRowValue(query_hash, row, 'filename')
        filesize = self._GetRowValue(query_hash, row, 'filesize')

        try:
            file_size = int(filesize, 10)
        except ValueError:
            parser_mediator.ProduceExtractionError(
                'unable to convert file size: {0!s} of file: {1:s}'.format(
                    filesize, filename))
            file_size = 0

        event_data = SkypeTransferFileEventData()
        event_data.destination = destination
        event_data.offset = self._GetRowValue(query_hash, row, 'id')
        event_data.query = query
        event_data.source = source
        event_data.transferred_filename = filename
        event_data.transferred_filepath = self._GetRowValue(
            query_hash, row, 'filepath')
        event_data.transferred_filesize = file_size

        status = self._GetRowValue(query_hash, row, 'status')
        starttime = self._GetRowValue(query_hash, row, 'starttime')

        if status == 2:
            if starttime:
                event_data.action_type = 'SENDSOLICITUDE'

                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=starttime)
                event = time_events.DateTimeValuesEvent(
                    date_time, 'File transfer from Skype')
                parser_mediator.ProduceEventWithEventData(event, event_data)

        elif status == 8:
            if starttime:
                event_data.action_type = 'GETSOLICITUDE'

                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=starttime)
                event = time_events.DateTimeValuesEvent(
                    date_time, 'File transfer from Skype')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            accepttime = self._GetRowValue(query_hash, row, 'accepttime')
            if accepttime:
                event_data.action_type = 'ACCEPTED'

                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=accepttime)
                event = time_events.DateTimeValuesEvent(
                    date_time, 'File transfer from Skype')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            finishtime = self._GetRowValue(query_hash, row, 'finishtime')
            if finishtime:
                event_data.action_type = 'FINISHED'

                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=finishtime)
                event = time_events.DateTimeValuesEvent(
                    date_time, 'File transfer from Skype')
                parser_mediator.ProduceEventWithEventData(event, event_data)
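The status handling above is repetitive; one possible refactor is a table of (status, row column, action type) triples that keeps the semantics the original encodes (status 2 for an offered transfer, status 8 for a completed one). A sketch, not plaso code:

_TRANSFER_EVENTS = (
    # (status, row column, action type)
    (2, 'starttime', 'SENDSOLICITUDE'),
    (8, 'starttime', 'GETSOLICITUDE'),
    (8, 'accepttime', 'ACCEPTED'),
    (8, 'finishtime', 'FINISHED'))

# As a class attribute, the four nearly identical blocks then collapse to:
#
#   for event_status, column, action_type in self._TRANSFER_EVENTS:
#     if status != event_status:
#       continue
#     timestamp = self._GetRowValue(query_hash, row, column)
#     if timestamp:
#       event_data.action_type = action_type
#       ...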
Example #5
    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows job file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        try:
            header_struct = self._JOB_FIXED_LENGTH_SECTION_STRUCT.parse_stream(
                file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse fixed-length section with error: {0:s}'.
                format(exception))

        if header_struct.product_version not in self._PRODUCT_VERSIONS:
            raise errors.UnableToParseFile(
                u'Unsupported product version in: 0x{0:04x}'.format(
                    header_struct.product_version))

        if header_struct.format_version != 1:
            raise errors.UnableToParseFile(
                u'Unsupported format version in: {0:d}'.format(
                    header_struct.format_version))

        try:
            job_variable_struct = self._JOB_VARIABLE_STRUCT.parse_stream(
                file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse variable-length section with error: {0:s}'.
                format(exception))

        event_data = WinJobEventData()
        event_data.application = binary.ReadUTF16(
            job_variable_struct.application)
        event_data.comment = binary.ReadUTF16(job_variable_struct.comment)
        event_data.parameters = binary.ReadUTF16(job_variable_struct.parameter)
        event_data.username = binary.ReadUTF16(job_variable_struct.username)
        event_data.working_directory = binary.ReadUTF16(
            job_variable_struct.working_directory)

        systemtime_struct = header_struct.last_run_time
        system_time_tuple = (systemtime_struct.year, systemtime_struct.month,
                             systemtime_struct.weekday, systemtime_struct.day,
                             systemtime_struct.hours,
                             systemtime_struct.minutes,
                             systemtime_struct.seconds,
                             systemtime_struct.milliseconds)

        date_time = None
        if system_time_tuple != self._EMPTY_SYSTEM_TIME_TUPLE:
            try:
                date_time = dfdatetime_systemtime.Systemtime(
                    system_time_tuple=system_time_tuple)
            except ValueError:
                parser_mediator.ProduceExtractionError(
                    u'invalid last run time: {0!s}'.format(system_time_tuple))

        if date_time:
            event = time_events.DateTimeValuesEvent(
                date_time, eventdata.EventTimestamp.LAST_RUNTIME)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        for index in range(job_variable_struct.number_of_triggers):
            try:
                trigger_struct = self._TRIGGER_STRUCT.parse_stream(file_object)
            except (IOError, construct.FieldError) as exception:
                parser_mediator.ProduceExtractionError(
                    u'unable to parse trigger: {0:d} with error: {1:s}'.format(
                        index, exception))
                return

            event_data.trigger_type = trigger_struct.trigger_type

            time_elements_tuple = (trigger_struct.start_year,
                                   trigger_struct.start_month,
                                   trigger_struct.start_day,
                                   trigger_struct.start_hour,
                                   trigger_struct.start_minute, 0)

            if time_elements_tuple != (0, 0, 0, 0, 0, 0):
                try:
                    date_time = dfdatetime_time_elements.TimeElements(
                        time_elements_tuple=time_elements_tuple)
                    date_time.is_local_time = True
                    date_time.precision = dfdatetime_definitions.PRECISION_1_MINUTE
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionError(
                        u'invalid trigger start time: {0!s}'.format(
                            time_elements_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time,
                        u'Scheduled to start',
                        time_zone=parser_mediator.timezone)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_elements_tuple = (trigger_struct.end_year,
                                   trigger_struct.end_month,
                                   trigger_struct.end_day, 0, 0, 0)

            if time_elements_tuple != (0, 0, 0, 0, 0, 0):
                try:
                    date_time = dfdatetime_time_elements.TimeElements(
                        time_elements_tuple=time_elements_tuple)
                    date_time.is_local_time = True
                    date_time.precision = dfdatetime_definitions.PRECISION_1_DAY
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionError(
                        u'invalid trigger end time: {0!s}'.format(
                            time_elements_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time,
                        u'Scheduled to end',
                        time_zone=parser_mediator.timezone)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
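The last_run_time parsed above is a Windows SYSTEMTIME structure: eight consecutive 16-bit values, which is why the code builds an 8-tuple in exactly that order. For reference, the documented layout (a ctypes sketch, independent of the construct definitions the parser uses):

import ctypes

class SYSTEMTIME(ctypes.Structure):
  """Windows SYSTEMTIME structure (see the Win32 documentation)."""
  _fields_ = [
      ('wYear', ctypes.c_uint16),
      ('wMonth', ctypes.c_uint16),
      ('wDayOfWeek', ctypes.c_uint16),
      ('wDay', ctypes.c_uint16),
      ('wHour', ctypes.c_uint16),
      ('wMinute', ctypes.c_uint16),
      ('wSecond', ctypes.c_uint16),
      ('wMilliseconds', ctypes.c_uint16)]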
Example #6
File: ntfs.py Project: siriusAnalyst/plaso
    def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
        """Parses an USN change journal.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      usn_change_journal (pyfsntsfs.usn_change_journal): USN change journal.

    Raises:
      ParseError: if an USN change journal record cannot be parsed.
    """
        if not usn_change_journal:
            return

        usn_record_map = self._GetDataTypeMap('usn_record_v2')

        usn_record_data = usn_change_journal.read_usn_record()
        while usn_record_data:
            current_offset = usn_change_journal.get_offset()

            try:
                usn_record = self._ReadStructureFromByteStream(
                    usn_record_data, current_offset, usn_record_map)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError((
                    'Unable to parse USN record at offset: 0x{0:08x} with error: '
                    '{1!s}').format(current_offset, exception))

            # Per MSDN we need to use the name offset for forward
            # compatibility. The offset is relative to the start of the
            # record, hence the 60-byte fixed-size portion of the
            # USN_RECORD_V2 structure is subtracted.
            name_offset = usn_record.name_offset - 60
            utf16_stream = usn_record.name[name_offset:usn_record.name_size]

            try:
                name_string = utf16_stream.decode('utf-16-le')
            except (UnicodeDecodeError, UnicodeEncodeError) as exception:
                name_string = utf16_stream.decode('utf-16-le',
                                                  errors='replace')
                parser_mediator.ProduceExtractionError((
                    'unable to decode USN record name string with error: '
                    '{0:s}. Characters that cannot be decoded will be replaced '
                    'with "?" or "\\ufffd".').format(exception))

            event_data = NTFSUSNChangeEventData()
            event_data.file_attribute_flags = usn_record.file_attribute_flags
            event_data.file_reference = usn_record.file_reference
            event_data.filename = name_string
            event_data.offset = current_offset
            event_data.parent_file_reference = usn_record.parent_file_reference
            event_data.update_reason_flags = usn_record.update_reason_flags
            event_data.update_sequence_number = usn_record.update_sequence_number
            event_data.update_source_flags = usn_record.update_source_flags

            if not usn_record.update_date_time:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=usn_record.update_date_time)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            usn_record_data = usn_change_journal.read_usn_record()
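The update_reason_flags stored on the event data is a bitmask of documented USN_REASON_* values. A few of the common ones, for reference when reading the produced events (values from the Microsoft USN_RECORD_V2 documentation):

USN_REASON_DATA_OVERWRITE = 0x00000001
USN_REASON_DATA_EXTEND = 0x00000002
USN_REASON_FILE_CREATE = 0x00000100
USN_REASON_FILE_DELETE = 0x00000200
USN_REASON_RENAME_NEW_NAME = 0x00002000
USN_REASON_CLOSE = 0x80000000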
Example #7
    def ParseAccountInformation(self, parser_mediator, query, row,
                                **unused_kwargs):
        """Parses account information.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row with account information.
    """
        query_hash = hash(query)

        display_name = self._GetRowValue(query_hash, row, 'given_displayname')
        fullname = self._GetRowValue(query_hash, row, 'fullname')

        # TODO: Move this to the formatter, and ensure username is rendered
        # properly when fullname and/or display_name is None.
        username = '{0:s} <{1:s}>'.format(fullname, display_name)

        event_data = SkypeAccountEventData()
        event_data.country = self._GetRowValue(query_hash, row, 'country')
        event_data.display_name = display_name
        event_data.email = self._GetRowValue(query_hash, row, 'emails')
        event_data.offset = self._GetRowValue(query_hash, row, 'id')
        event_data.query = query
        event_data.username = username

        timestamp = self._GetRowValue(query_hash, row, 'profile_timestamp')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Profile Changed')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'authreq_timestamp')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Authenticate Request')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'lastonline_timestamp')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time, 'Last Online')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'mood_timestamp')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time, 'Mood Event')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'sent_authrequest_time')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Auth Request Sent')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'lastused_timestamp')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time, 'Last Used')
            parser_mediator.ProduceEventWithEventData(event, event_data)
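Each of the Skype account timestamps above is a plain POSIX value in seconds, so each becomes a dfdatetime PosixTime. A standalone illustration of the conversion the events carry:

from dfdatetime import posix_time as dfdatetime_posix_time

date_time = dfdatetime_posix_time.PosixTime(timestamp=1515151515)
print(date_time.CopyToDateTimeString())  # 2018-01-05 11:25:15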
Example #8
    def _ParseFileStatAttribute(self, parser_mediator, mft_entry,
                                mft_attribute, path_hints):
        """Extract data from a NFTS $STANDARD_INFORMATION or $FILE_NAME attribute.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      mft_entry (pyfsntfs.file_entry): MFT entry.
      mft_attribute (pyfsntfs.attribute): MFT attribute.
      path_hints (list[str]): hints about the full path of the file.
    """
        event_data = NTFSFileStatEventData()
        event_data.attribute_type = mft_attribute.attribute_type
        event_data.file_reference = mft_entry.file_reference
        event_data.is_allocated = mft_entry.is_allocated()
        event_data.path_hints = path_hints

        if mft_attribute.attribute_type == self._MFT_ATTRIBUTE_FILE_NAME:
            event_data.file_attribute_flags = mft_attribute.file_attribute_flags
            event_data.name = mft_attribute.name
            event_data.parent_file_reference = mft_attribute.parent_file_reference

        try:
            creation_time = mft_attribute.get_creation_time_as_integer()
        except OverflowError as exception:
            parser_mediator.ProduceExtractionWarning(
                ('unable to read the creation timestamp from MFT attribute: '
                 '0x{0:08x} with error: {1!s}').format(
                     mft_attribute.attribute_type, exception))
            creation_time = None

        if creation_time is not None:
            date_time = self._GetDateTime(creation_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        try:
            modification_time = (
                mft_attribute.get_modification_time_as_integer())
        except OverflowError as exception:
            parser_mediator.ProduceExtractionWarning((
                'unable to read the modification timestamp from MFT attribute: '
                '0x{0:08x} with error: {1!s}').format(
                    mft_attribute.attribute_type, exception))
            modification_time = None

        if modification_time is not None:
            date_time = self._GetDateTime(modification_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        try:
            access_time = mft_attribute.get_access_time_as_integer()
        except OverflowError as exception:
            parser_mediator.ProduceExtractionWarning(
                ('unable to read the access timestamp from MFT attribute: '
                 '0x{0:08x} with error: {1!s}').format(
                     mft_attribute.attribute_type, exception))
            access_time = None

        if access_time is not None:
            date_time = self._GetDateTime(access_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        try:
            entry_modification_time = (
                mft_attribute.get_entry_modification_time_as_integer())
        except OverflowError as exception:
            parser_mediator.ProduceExtractionWarning(
                ('unable to read the entry modification timestamp from MFT '
                 'attribute: 0x{0:08x} with error: {1!s}').format(
                     mft_attribute.attribute_type, exception))
            entry_modification_time = None

        if entry_modification_time is not None:
            date_time = self._GetDateTime(entry_modification_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
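The value written to event_data.attribute_type is the NTFS on-disk attribute type code. The two this method handles are documented values (plaso defines its own constants for them):

# NTFS attribute type codes (documented on-disk values):
MFT_ATTRIBUTE_STANDARD_INFORMATION = 0x00000010
MFT_ATTRIBUTE_FILE_NAME = 0x00000030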
Example #9
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        network_info = {}
        signatures = registry_key.GetSubkeyByName('Signatures')
        if signatures:
            network_info = self._GetNetworkInfo(signatures)

        profiles = registry_key.GetSubkeyByName('Profiles')
        if not profiles:
            return

        for subkey in profiles.GetSubkeys():
            default_gateway_mac, dns_suffix = network_info.get(
                subkey.name, (None, None))

            event_data = WindowsRegistryNetworkListEventData()
            event_data.default_gateway_mac = default_gateway_mac
            event_data.dns_suffix = dns_suffix

            ssid_value = subkey.GetValueByName('ProfileName')
            if ssid_value:
                event_data.ssid = ssid_value.GetDataAsObject()

            description_value = subkey.GetValueByName('Description')
            if description_value:
                event_data.description = description_value.GetDataAsObject()

            connection_type_value = subkey.GetValueByName('NameType')
            if connection_type_value:
                connection_type = connection_type_value.GetDataAsObject()
                # TODO: move to formatter.
                connection_type = self._CONNECTION_TYPE.get(
                    connection_type, 'unknown')
                event_data.connection_type = connection_type

            date_created_value = subkey.GetValueByName('DateCreated')
            if date_created_value:
                try:
                    date_time = self._ParseSystemTime(date_created_value.data)
                except errors.ParseError as exception:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse date created with error: {0!s}'.
                        format(exception))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_CREATION)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            date_last_connected_value = subkey.GetValueByName(
                'DateLastConnected')
            if date_last_connected_value:
                try:
                    date_time = self._ParseSystemTime(
                        date_last_connected_value.data)
                except errors.ParseError as exception:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse date last connected with error: {0!s}'
                        .format(exception))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
Example #10
  def ParseDestList(self, parser_mediator, olecf_item):
    """Parses the DestList OLECF item.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      olecf_item (pyolecf.item): OLECF item.
    """
    try:
      header = self._DEST_LIST_STREAM_HEADER.parse_stream(olecf_item)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse DestList header with error: {0!s}'.format(
              exception))

    if header.format_version not in (1, 3, 4):
      parser_mediator.ProduceExtractionError(
          'unsupported format version: {0:d}.'.format(header.format_version))
      return

    if header.format_version == 1:
      dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V1
    elif header.format_version in (3, 4):
      dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V3

    entry_offset = olecf_item.get_offset()
    while entry_offset < olecf_item.size:
      try:
        entry = dest_list_stream_entry.parse_stream(olecf_item)
      except (IOError, construct.FieldError) as exception:
        raise errors.UnableToParseFile(
            'Unable to parse DestList entry with error: {0!s}'.format(
                exception))

      if not entry:
        break

      display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)

      try:
        droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.droid_volume_identifier, display_name)

      except (TypeError, ValueError) as exception:
        droid_volume_identifier = ''
        parser_mediator.ProduceExtractionError(
            'unable to read droid volume identifier with error: {0!s}'.format(
                exception))

      try:
        droid_file_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.droid_file_identifier, display_name)

      except (TypeError, ValueError) as exception:
        droid_file_identifier = ''
        parser_mediator.ProduceExtractionError(
            'unable to read droid file identifier with error: {0!s}'.format(
                exception))

      try:
        birth_droid_volume_identifier = (
            self._ParseDistributedTrackingIdentifier(
                parser_mediator, entry.birth_droid_volume_identifier,
                display_name))

      except (TypeError, ValueError) as exception:
        birth_droid_volume_identifier = ''
        parser_mediator.ProduceExtractionError((
            'unable to read birth droid volume identifier with error: '
            '{0!s}').format(exception))

      try:
        birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.birth_droid_file_identifier, display_name)

      except (TypeError, ValueError) as exception:
        birth_droid_file_identifier = ''
        parser_mediator.ProduceExtractionError((
            'unable to read birth droid file identifier with error: '
            '{0!s}').format(exception))

      if entry.last_modification_time == 0:
        date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      else:
        date_time = dfdatetime_filetime.Filetime(
            timestamp=entry.last_modification_time)

      event_data = AutomaticDestinationsDestListEntryEventData()
      event_data.birth_droid_file_identifier = birth_droid_file_identifier
      event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
      event_data.droid_file_identifier = droid_file_identifier
      event_data.droid_volume_identifier = droid_volume_identifier
      event_data.entry_number = entry.entry_number
      event_data.hostname = binary.ByteStreamCopyToString(
          entry.hostname, codepage='ascii')
      event_data.offset = entry_offset
      event_data.path = binary.UTF16StreamCopyToString(entry.path)
      event_data.pin_status = entry.pin_status

      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      entry_offset = olecf_item.get_offset()
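The droid identifiers parsed above are distributed link tracking UUIDs; for version 1 UUIDs the node field embeds a MAC address, which is what _ParseDistributedTrackingIdentifier recovers. A standalone sketch of that extraction using a made-up identifier:

import uuid

identifier = uuid.UUID('97c57b53-1b4f-11e7-9ce1-080027b8e2e3')
if identifier.version == 1:
  node_hex = '{0:012x}'.format(identifier.node)
  mac_address = ':'.join(
      node_hex[index:index + 2] for index in range(0, 12, 2))
  print(mac_address)  # 08:00:27:b8:e2:e3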
Example #11
    def ParseCookieRow(self,
                       parser_mediator,
                       row,
                       query=None,
                       **unused_kwargs):
        """Parses a cookie row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row.
      query (Optional[str]): query.
    """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        cookie_data = row['value']
        cookie_name = row['name']

        hostname = row['host']
        if hostname.startswith('.'):
            hostname = hostname[1:]

        is_secure = bool(row['isSecure'])
        if is_secure:
            url_scheme = u'https'
        else:
            url_scheme = u'http'

        path = row['path']
        url = u'{0:s}://{1:s}{2:s}'.format(url_scheme, hostname, path)

        event_data = FirefoxCookieEventData()
        event_data.cookie_name = cookie_name
        event_data.data = cookie_data
        event_data.host = hostname
        event_data.httponly = bool(row['isHttpOnly'])
        event_data.offset = row['id']
        event_data.path = path
        event_data.query = query
        event_data.secure = is_secure
        event_data.url = url

        timestamp = row['creationTime']
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
                timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = row['lastAccessed']
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
                timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = row['expiry']
        if timestamp:
            # Expiry time (nsCookieService::GetExpiry in
            # netwerk/cookie/nsCookieService.cpp). It is calculated as the
            # difference between the server time and the time the server
            # wants the cookie to expire, added to the client time. This
            # localizes the client time regardless of whether the TZ
            # environment variable was set on the client.

            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        # Go through all cookie plugins to see if any specific parsing is
        # needed.
        for cookie_plugin in self._cookie_plugins:
            try:
                cookie_plugin.UpdateChainAndProcess(parser_mediator,
                                                    cookie_name=cookie_name,
                                                    cookie_data=cookie_data,
                                                    url=url)
            except errors.WrongPlugin:
                pass
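Note the asymmetry in the Firefox schema that the code above reflects: moz_cookies stores creationTime and lastAccessed as PRTime (microseconds since the POSIX epoch) but expiry as plain seconds, hence the mix of PosixTimeInMicroseconds and PosixTime. Illustrative values:

creation_time = 1449084669079431  # PRTime: microseconds since the epoch
expiry = 1513644669               # plain POSIX seconds

# Dividing a PRTime value by one million yields comparable seconds.
creation_time_seconds = creation_time // 1000000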
Example #12
    def _ParseContainerConfigJSON(self, parser_mediator, file_object):
        """Extracts events from a Docker container configuration file.

    The path of each container config file is:
    DOCKER_DIR/containers/<container_id>/config.json

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file is not a valid container config file.
    """
        file_content = file_object.read()
        file_content = codecs.decode(file_content, self._ENCODING)

        json_dict = json.loads(file_content)

        if 'Driver' not in json_dict:
            raise errors.UnableToParseFile(
                'not a valid Docker container configuration file, missing '
                '\'Driver\' key.')

        container_id_from_path = self._GetIdentifierFromPath(parser_mediator)
        container_id_from_json = json_dict.get('ID', None)
        if not container_id_from_json:
            raise errors.UnableToParseFile(
                'not a valid Docker container configuration file, the \'ID\' '
                'key is missing from the JSON dict (should be {0:s})'.format(
                    container_id_from_path))

        if container_id_from_json != container_id_from_path:
            raise errors.UnableToParseFile(
                'not a valid Docker container configuration file. The \'ID\' '
                'key of the JSON dict ({0:s}) differs from the container ID '
                'taken from the path to the file ({1:s}).'.format(
                    container_id_from_json, container_id_from_path))

        if 'Config' in json_dict and 'Hostname' in json_dict['Config']:
            container_name = json_dict['Config']['Hostname']
        else:
            container_name = 'Unknown container name'

        event_data = DockerJSONContainerEventData()
        event_data.container_id = container_id_from_path
        event_data.container_name = container_name

        json_state = json_dict.get('State', None)
        if json_state is not None:
            time_string = json_state.get('StartedAt', None)
            if time_string is not None:
                event_data.action = 'Container Started'

                try:
                    date_time = (
                        dfdatetime_time_elements.TimeElementsInMicroseconds())
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError as exception:
                    parser_mediator.ProduceExtractionWarning((
                        'Unable to parse container start time string: {0:s} with error: '
                        '{1!s}').format(time_string, exception))
                    date_time = dfdatetime_semantic_time.InvalidTime()

                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_START)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            time_string = json_state.get('FinishedAt', None)
            if time_string is not None:
                # If the timestamp is 0001-01-01T00:00:00Z, the container
                # is still running, so we do not generate a "Finished"
                # event.
                if time_string != '0001-01-01T00:00:00Z':
                    event_data.action = 'Container Finished'

                    try:
                        date_time = (
                            dfdatetime_time_elements.TimeElementsInMicroseconds())
                        date_time.CopyFromStringISO8601(time_string)
                    except ValueError as exception:
                        parser_mediator.ProduceExtractionWarning((
                            'Unable to parse container finish time string: {0:s} with '
                            'error: {1!s}').format(time_string, exception))
                        date_time = dfdatetime_semantic_time.InvalidTime()

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_END)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

        time_string = json_dict.get('Created', None)
        if time_string is not None:
            event_data.action = 'Container Created'

            try:
                date_time = (
                    dfdatetime_time_elements.TimeElementsInMicroseconds())
                date_time.CopyFromStringISO8601(time_string)
            except ValueError as exception:
                parser_mediator.ProduceExtractionWarning((
                    'Unable to parse container created time string: {0:s} with error: '
                    '{1!s}').format(time_string, exception))
                date_time = dfdatetime_semantic_time.InvalidTime()

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ADDED)
            parser_mediator.ProduceEventWithEventData(event, event_data)
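Docker writes these timestamps as ISO 8601 / RFC 3339 strings with a trailing Z (typically with nanosecond fractions, e.g. '2015-10-12T17:27:03.345234522Z'), which is why the code parses them with CopyFromStringISO8601 rather than a numeric timestamp class. Standalone, with a microsecond fraction for simplicity:

from dfdatetime import time_elements as dfdatetime_time_elements

date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromStringISO8601('2015-10-12T17:27:03.345234Z')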
Example #13
File: winlnk.py Project: tomchop/plaso
    def ParseFileLNKFile(self, parser_mediator, file_object, display_name):
        """Parses a Windows Shortcut (LNK) file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
      display_name (str): display name.
    """
        lnk_file = pylnk.file()
        lnk_file.set_ascii_codepage(parser_mediator.codepage)

        try:
            lnk_file.open_file_object(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to open file with error: {0!s}'.format(exception))
            return

        link_target = None
        if lnk_file.link_target_identifier_data:  # pylint: disable=using-constant-test
            # TODO: change file_entry.name to display name once it is generated
            # correctly.
            display_name = parser_mediator.GetFilename()
            shell_items_parser = shell_items.ShellItemsParser(display_name)
            shell_items_parser.ParseByteStream(
                parser_mediator,
                lnk_file.link_target_identifier_data,
                codepage=parser_mediator.codepage)

            link_target = shell_items_parser.CopyToPath()

        event_data = WinLnkLinkEventData()
        event_data.birth_droid_file_identifier = (
            lnk_file.birth_droid_file_identifier)
        event_data.birth_droid_volume_identifier = (
            lnk_file.birth_droid_volume_identifier)
        event_data.command_line_arguments = lnk_file.command_line_arguments
        event_data.description = lnk_file.description
        event_data.drive_serial_number = lnk_file.drive_serial_number
        event_data.drive_type = lnk_file.drive_type
        event_data.droid_file_identifier = lnk_file.droid_file_identifier
        event_data.droid_volume_identifier = lnk_file.droid_volume_identifier
        event_data.env_var_location = lnk_file.environment_variables_location
        event_data.file_attribute_flags = lnk_file.file_attribute_flags
        event_data.file_size = lnk_file.file_size
        event_data.icon_location = lnk_file.icon_location
        event_data.link_target = link_target
        event_data.local_path = lnk_file.local_path
        event_data.network_path = lnk_file.network_path
        event_data.relative_path = lnk_file.relative_path
        event_data.volume_label = lnk_file.volume_label
        event_data.working_directory = lnk_file.working_directory

        access_time = lnk_file.get_file_access_time_as_integer()
        if access_time != 0:
            date_time = dfdatetime_filetime.Filetime(timestamp=access_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        creation_time = lnk_file.get_file_creation_time_as_integer()
        if creation_time != 0:
            date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        modification_time = lnk_file.get_file_modification_time_as_integer()
        if modification_time != 0:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=modification_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if access_time == 0 and creation_time == 0 and modification_time == 0:
            date_time = dfdatetime_semantic_time.NotSet()
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if lnk_file.droid_file_identifier:  # pylint: disable=using-constant-test
            try:
                self._ParseDistributedTrackingIdentifier(
                    parser_mediator, lnk_file.droid_file_identifier,
                    display_name)
            except (TypeError, ValueError) as exception:
                parser_mediator.ProduceExtractionWarning(
                    'unable to read droid file identifier with error: {0!s}.'.
                    format(exception))

        if lnk_file.birth_droid_file_identifier:  # pylint: disable=using-constant-test
            try:
                self._ParseDistributedTrackingIdentifier(
                    parser_mediator, lnk_file.birth_droid_file_identifier,
                    display_name)
            except (TypeError, ValueError) as exception:
                parser_mediator.ProduceExtractionWarning(
                    ('unable to read birth droid file identifier with error: '
                     '{0!s}.').format(exception))

        lnk_file.close()
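For reference, opening a shortcut with pylnk outside of plaso follows the same pattern as above (a sketch, assuming the pylnk Python bindings are installed and a local example.lnk exists):

import pylnk

lnk_file = pylnk.file()
with open('example.lnk', 'rb') as file_object:
  lnk_file.open_file_object(file_object)
  print(lnk_file.local_path)
  lnk_file.close()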
Example #14
File: userassist.py Project: tomchop/plaso
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        version_value = registry_key.GetValueByName('Version')
        count_subkey = registry_key.GetSubkeyByName('Count')

        if not version_value:
            parser_mediator.ProduceExtractionWarning('missing version value')
            return

        if not version_value.DataIsInteger():
            parser_mediator.ProduceExtractionWarning(
                'unsupported version value data type')
            return

        format_version = version_value.GetDataAsObject()
        if format_version not in (3, 5):
            parser_mediator.ProduceExtractionWarning(
                'unsupported format version: {0:d}'.format(format_version))
            return

        if not count_subkey:
            parser_mediator.ProduceExtractionWarning('missing count subkey')
            return

        userassist_entry_index = 0

        for registry_value in count_subkey.GetValues():
            try:
                # Note that Python 2 codecs.decode() does not support keyword
                # arguments such as encoding='rot-13'.
                value_name = codecs.decode(registry_value.name, 'rot-13')
            except UnicodeEncodeError as exception:
                logger.debug((
                    'Unable to decode UserAssist string: {0:s} with error: {1!s}.\n'
                    'Attempting piecewise decoding.').format(
                        registry_value.name, exception))

                characters = []
                for char in registry_value.name:
                    if ord(char) < 128:
                        try:
                            characters.append(codecs.decode(char, 'rot-13'))
                        except UnicodeEncodeError:
                            characters.append(char)
                    else:
                        characters.append(char)

                value_name = ''.join(characters)

            if format_version == 5:
                path_segments = value_name.split('\\')

                for segment_index, path_segment in enumerate(path_segments):
                    # Remove the { } from the path segment to get the GUID.
                    guid = path_segments[segment_index][1:-1]
                    path_segments[segment_index] = known_folder_ids.PATHS.get(
                        guid, path_segment)

                value_name = '\\'.join(path_segments)
                # Check if we might need to substitute values.
                if '%' in value_name:
                    # TODO: fix missing self._knowledge_base
                    # pylint: disable=no-member
                    environment_variables = (
                        self._knowledge_base.GetEnvironmentVariables())
                    value_name = path_helper.PathHelper.ExpandWindowsPath(
                        value_name, environment_variables)

            if value_name == 'UEME_CTLSESSION':
                continue

            if format_version == 3:
                entry_map = self._GetDataTypeMap('user_assist_entry_v3')
            elif format_version == 5:
                entry_map = self._GetDataTypeMap('user_assist_entry_v5')
            else:
                parser_mediator.ProduceExtractionWarning(
                    'unsupported format version: {0:d}'.format(format_version))
                continue

            if not registry_value.DataIsBinaryData():
                parser_mediator.ProduceExtractionWarning(
                    'unsupported value data type: {0:s}'.format(
                        registry_value.data_type_string))
                continue

            entry_data_size = entry_map.GetByteSize()
            value_data_size = len(registry_value.data)
            if entry_data_size != value_data_size:
                parser_mediator.ProduceExtractionWarning(
                    'unsupported value data size: {0:d}'.format(
                        value_data_size))
                continue

            try:
                user_assist_entry = self._ReadStructureFromByteStream(
                    registry_value.data, 0, entry_map)
            except (ValueError, errors.ParseError) as exception:
                parser_mediator.ProduceExtractionWarning(
                    'unable to parse UserAssist entry value with error: {0!s}'.
                    format(exception))
                continue

            event_data = UserAssistWindowsRegistryEventData()
            event_data.key_path = count_subkey.path
            event_data.number_of_executions = user_assist_entry.number_of_executions
            event_data.value_name = value_name

            if format_version == 3:
                if event_data.number_of_executions > 5:
                    event_data.number_of_executions -= 5

            elif format_version == 5:
                userassist_entry_index += 1

                event_data.application_focus_count = (
                    user_assist_entry.application_focus_count)
                event_data.application_focus_duration = (
                    user_assist_entry.application_focus_duration)
                event_data.entry_index = userassist_entry_index

            timestamp = user_assist_entry.last_execution_time
            if not timestamp:
                date_time = dfdatetime_semantic_time.NotSet()
            else:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
            parser_mediator.ProduceEventWithEventData(event, event_data)
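
UserAssist value names are obfuscated with ROT-13, which is why the loop above runs codecs.decode(..., 'rot-13') before anything else. A quick round trip (the encoded name below is illustrative):

import codecs

encoded_name = 'HRZR_EHACNGU:P:\\Jvaqbjf\\abgrcnq.rkr'
print(codecs.decode(encoded_name, 'rot-13'))
# UEME_RUNPATH:C:\Windows\notepad.exe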
Example #15
    def ParseCookieRow(self, parser_mediator, query, row, **unused_kwargs):
        """Parses a cookie row.

        Args:
          parser_mediator (ParserMediator): parser mediator.
          query (str): query that created the row.
          row (sqlite3.Row): row resulting from the query.
        """
        query_hash = hash(query)

        cookie_name = self._GetRowValue(query_hash, row, 'name')
        cookie_data = self._GetRowValue(query_hash, row, 'value')

        hostname = self._GetRowValue(query_hash, row, 'host_key')
        if hostname.startswith('.'):
            hostname = hostname[1:]

        httponly = self._GetRowValue(query_hash, row, 'httponly')
        path = self._GetRowValue(query_hash, row, 'path')
        persistent = self._GetRowValue(query_hash, row, 'persistent')
        secure = self._GetRowValue(query_hash, row, 'secure')

        if secure:
            scheme = 'https'
        else:
            scheme = 'http'

        url = '{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)

        event_data = ChromeCookieEventData()
        event_data.cookie_name = cookie_name
        event_data.data = cookie_data
        event_data.host = hostname
        event_data.httponly = bool(httponly)
        event_data.path = path
        event_data.persistent = bool(persistent)
        event_data.query = query
        event_data.secure = bool(secure)
        event_data.url = url

        timestamp = self._GetRowValue(query_hash, row, 'creation_utc')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'last_access_utc')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'expires_utc')
        if timestamp:
            date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        for plugin in self._cookie_plugins:
            if cookie_name != plugin.COOKIE_NAME:
                continue

            try:
                plugin.UpdateChainAndProcess(parser_mediator,
                                             cookie_data=cookie_data,
                                             cookie_name=cookie_name,
                                             url=url)

            except Exception as exception:  # pylint: disable=broad-except
                parser_mediator.ProduceExtractionError(
                    'plugin: {0:s} unable to parse cookie with error: {1!s}'.
                    format(plugin.NAME, exception))
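
The creation_utc, last_access_utc and expires_utc columns hold WebKit timestamps: microseconds since 1601-01-01, which dfdatetime's WebKitTime wraps above. The equivalent standard-library arithmetic, for reference:

from datetime import datetime, timedelta

def webkit_time_to_datetime(timestamp):
    """Converts a WebKit timestamp (microseconds since 1601) to datetime."""
    return datetime(1601, 1, 1) + timedelta(microseconds=timestamp)

print(webkit_time_to_datetime(13244473600000000))
# 2020-09-13 12:26:40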
Example #16
File: sam_users.py Project: tomchop/plaso
  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    names_key = registry_key.GetSubkeyByName('Names')
    if not names_key:
      parser_mediator.ProduceExtractionWarning('missing subkey: Names.')
      return

    last_written_time_per_username = {
        registry_value.name: registry_value.last_written_time
        for registry_value in names_key.GetSubkeys()}

    for subkey in registry_key.GetSubkeys():
      if subkey.name == 'Names':
        continue

      try:
        f_value = self._ParseFValue(subkey)
      except errors.ParseError as exception:
        parser_mediator.ProduceExtractionWarning(
            'unable to parse F value with error: {0!s}'.format(exception))
        continue

      registry_value = subkey.GetValueByName('V')
      if not registry_value:
        parser_mediator.ProduceExtractionWarning(
            'missing Registry value: "V" in subkey: {0:s}.'.format(
                subkey.name))
        continue

      v_value_map = self._GetDataTypeMap('v_value')

      try:
        v_value = self._ReadStructureFromByteStream(
            registry_value.data, 0, v_value_map)
      except (ValueError, errors.ParseError) as exception:
        parser_mediator.ProduceExtractionWarning(
            'unable to parse V value with error: {0!s}'.format(exception))
        continue

      username = self._ParseVValueString(
          parser_mediator, registry_value.data, v_value[1])

      fullname = self._ParseVValueString(
          parser_mediator, registry_value.data, v_value[2])

      comments = self._ParseVValueString(
          parser_mediator, registry_value.data, v_value[3])

      last_written_time = last_written_time_per_username.get(username, None)

      # TODO: check if subkey.name == f_value.rid

      event_data = SAMUsersWindowsRegistryEventData()
      event_data.account_rid = f_value.rid
      event_data.comments = comments
      event_data.fullname = fullname
      event_data.key_path = registry_key.path
      event_data.login_count = f_value.number_of_logons
      event_data.username = username

      event = time_events.DateTimeValuesEvent(
          last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      if f_value.last_login_time != 0:
        date_time = dfdatetime_filetime.Filetime(
            timestamp=f_value.last_login_time)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_LOGIN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      if f_value.last_password_set_time != 0:
        date_time = dfdatetime_filetime.Filetime(
            timestamp=f_value.last_password_set_time)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_PASSWORD_RESET)
        parser_mediator.ProduceEventWithEventData(event, event_data)
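
The F value timestamps handled above (last login, last password set) are FILETIME values: 100-nanosecond intervals since 1601-01-01, hence the dfdatetime Filetime wrapper. The same conversion done by hand:

from datetime import datetime, timedelta

def filetime_to_datetime(timestamp):
    """Converts a FILETIME (100ns intervals since 1601) to datetime."""
    return datetime(1601, 1, 1) + timedelta(microseconds=timestamp // 10)

print(filetime_to_datetime(132444736000000000))
# 2020-09-13 12:26:40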
Example #17
File: ntfs.py Project: siriusAnalyst/plaso
    def _ParseMFTAttribute(self, parser_mediator, mft_entry, mft_attribute):
        """Extract data from a NFTS $MFT attribute.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      mft_entry (pyfsntfs.file_entry): MFT entry.
      mft_attribute (pyfsntfs.attribute): MFT attribute.
    """
        if mft_entry.is_empty() or mft_entry.base_record_file_reference != 0:
            return

        if mft_attribute.attribute_type in [
                self._MFT_ATTRIBUTE_STANDARD_INFORMATION,
                self._MFT_ATTRIBUTE_FILE_NAME
        ]:

            file_attribute_flags = getattr(mft_attribute,
                                           'file_attribute_flags', None)
            name = getattr(mft_attribute, 'name', None)
            parent_file_reference = getattr(mft_attribute,
                                            'parent_file_reference', None)

            event_data = NTFSFileStatEventData()
            event_data.attribute_type = mft_attribute.attribute_type
            event_data.file_attribute_flags = file_attribute_flags
            event_data.file_reference = mft_entry.file_reference
            event_data.is_allocated = mft_entry.is_allocated()
            event_data.name = name
            event_data.parent_file_reference = parent_file_reference

            try:
                creation_time = mft_attribute.get_creation_time_as_integer()
            except OverflowError as exception:
                parser_mediator.ProduceExtractionError((
                    'unable to read the creation timestamp from MFT attribute: '
                    '0x{0:08x} with error: {1!s}').format(
                        mft_attribute.attribute_type, exception))
                creation_time = None

            if creation_time is not None:
                date_time = self._GetDateTime(creation_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_CREATION)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            try:
                modification_time = (
                    mft_attribute.get_modification_time_as_integer())
            except OverflowError as exception:
                parser_mediator.ProduceExtractionError((
                    'unable to read the modification timestamp from MFT attribute: '
                    '0x{0:08x} with error: {1!s}').format(
                        mft_attribute.attribute_type, exception))
                modification_time = None

            if modification_time is not None:
                date_time = self._GetDateTime(modification_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            try:
                access_time = mft_attribute.get_access_time_as_integer()
            except OverflowError as exception:
                parser_mediator.ProduceExtractionError(
                    ('unable to read the access timestamp from MFT attribute: '
                     '0x{0:08x} with error: {1!s}').format(
                         mft_attribute.attribute_type, exception))
                access_time = None

            if access_time is not None:
                date_time = self._GetDateTime(access_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            try:
                entry_modification_time = (
                    mft_attribute.get_entry_modification_time_as_integer())
            except OverflowError as exception:
                parser_mediator.ProduceExtractionError((
                    'unable to read the entry modification timestamp from MFT '
                    'attribute: 0x{0:08x} with error: {1!s}').format(
                        mft_attribute.attribute_type, exception))
                entry_modification_time = None

            if entry_modification_time is not None:
                date_time = self._GetDateTime(entry_modification_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
                parser_mediator.ProduceEventWithEventData(event, event_data)

        elif mft_attribute.attribute_type == self._MFT_ATTRIBUTE_OBJECT_ID:
            display_name = '$MFT: {0:d}-{1:d}'.format(
                mft_entry.file_reference & 0xffffffffffff,
                mft_entry.file_reference >> 48)

            if mft_attribute.droid_file_identifier:
                try:
                    self._ParseDistributedTrackingIdentifier(
                        parser_mediator, mft_attribute.droid_file_identifier,
                        display_name)

                except (TypeError, ValueError) as exception:
                    parser_mediator.ProduceExtractionError((
                        'unable to read droid file identifier from attribute: 0x{0:08x} '
                        'with error: {1!s}').format(
                            mft_attribute.attribute_type, exception))

            if mft_attribute.birth_droid_file_identifier:
                try:
                    self._ParseDistributedTrackingIdentifier(
                        parser_mediator,
                        mft_attribute.birth_droid_file_identifier,
                        display_name)

                except (TypeError, ValueError) as exception:
                    parser_mediator.ProduceExtractionError((
                        'unable to read birth droid file identifier from attribute: '
                        '0x{0:08x} with error: {1!s}').format(
                            mft_attribute.attribute_type, exception))
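
The display name built above relies on the layout of a 64-bit NTFS file reference: the lower 48 bits are the MFT entry number and the upper 16 bits the sequence number. In isolation:

def decompose_file_reference(file_reference):
    """Splits a 64-bit NTFS file reference into MFT entry and sequence."""
    mft_entry = file_reference & 0xffffffffffff  # lower 48 bits
    sequence_number = file_reference >> 48       # upper 16 bits
    return mft_entry, sequence_number

print(decompose_file_reference(0x0002000000000005))
# (5, 2)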
Example #18
File: winprefetch.py Project: juju4/plaso
    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows Prefetch file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.
        """
        scca_file = pyscca.file()

        try:
            scca_file.open_file_object(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to open file with error: {0!s}'.format(exception))
            return

        format_version = scca_file.format_version
        executable_filename = scca_file.executable_filename
        prefetch_hash = scca_file.prefetch_hash
        run_count = scca_file.run_count
        number_of_volumes = scca_file.number_of_volumes

        volume_serial_numbers = []
        volume_device_paths = []
        path = u''

        for volume_information in iter(scca_file.volumes):
            volume_serial_number = volume_information.serial_number
            volume_device_path = volume_information.device_path

            volume_serial_numbers.append(volume_serial_number)
            volume_device_paths.append(volume_device_path)

            timestamp = volume_information.get_creation_time_as_integer()
            if timestamp:
                event_data = windows_events.WindowsVolumeEventData()
                event_data.device_path = volume_device_path
                event_data.origin = parser_mediator.GetFilename()
                event_data.serial_number = volume_serial_number

                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, eventdata.EventTimestamp.CREATION_TIME)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            for filename in iter(scca_file.filenames):
                if not filename:
                    continue

                if (filename.startswith(volume_device_path)
                        and filename.endswith(executable_filename)):
                    _, _, path = filename.partition(volume_device_path)

        mapped_files = []
        for entry_index, file_metrics in enumerate(
                scca_file.file_metrics_entries):
            mapped_file_string = file_metrics.filename
            if not mapped_file_string:
                parser_mediator.ProduceExtractionError(
                    u'missing filename for file metrics entry: {0:d}'.format(
                        entry_index))
                continue

            file_reference = file_metrics.file_reference
            if file_reference:
                mapped_file_string = (
                    u'{0:s} [MFT entry: {1:d}, sequence: {2:d}]').format(
                        mapped_file_string, file_reference & 0xffffffffffff,
                        file_reference >> 48)

            mapped_files.append(mapped_file_string)

        event_data = WinPrefetchExecutionEventData()
        event_data.executable = executable_filename
        event_data.mapped_files = mapped_files
        event_data.number_of_volumes = number_of_volumes
        event_data.path = path
        event_data.prefetch_hash = prefetch_hash
        event_data.run_count = run_count
        event_data.version = format_version
        event_data.volume_device_paths = volume_device_paths
        event_data.volume_serial_numbers = volume_serial_numbers

        timestamp = scca_file.get_last_run_time_as_integer(0)
        if not timestamp:
            parser_mediator.ProduceExtractionError(u'missing last run time')
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.LAST_RUNTIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # Check for the 7 older last run time values available since
        # format version 26.
        if format_version >= 26:
            for last_run_time_index in range(1, 8):
                timestamp = scca_file.get_last_run_time_as_integer(
                    last_run_time_index)
                if not timestamp:
                    continue

                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                date_time_description = u'Previous {0:s}'.format(
                    eventdata.EventTimestamp.LAST_RUNTIME)
                event = time_events.DateTimeValuesEvent(
                    date_time, date_time_description)
                parser_mediator.ProduceEventWithEventData(event, event_data)

        scca_file.close()
Example #19
    def ParseRecord(self, parser_mediator, key, structure):
        """Parses a matching entry.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          key (str): name of the parsed structure.
          structure (pyparsing.ParseResults): elements parsed from the file.

        Raises:
          ParseError: when the structure type is unknown.
        """
        if key not in self._SUPPORTED_KEYS:
            raise errors.ParseError(
                'Unable to parse record, unknown structure: {0:s}'.format(key))

        if key in ('chromeos_syslog_line', 'rsyslog_line'):
            date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
            iso8601_string = self._GetValueFromStructure(structure, 'datetime')

            try:
                date_time.CopyFromStringISO8601(iso8601_string)
            except ValueError:
                parser_mediator.ProduceExtractionWarning(
                    'invalid date time value: {0:s}'.format(iso8601_string))
                return

        else:
            # TODO: add support for fractional seconds.

            month = self._GetValueFromStructure(structure, 'month')
            try:
                month = timelib.MONTH_DICT.get(month.lower(), 0)
            except AttributeError:
                parser_mediator.ProduceExtractionWarning(
                    'invalid month value: {0!s}'.format(month))
                return

            if month != 0:
                self._UpdateYear(parser_mediator, month)

            day = self._GetValueFromStructure(structure, 'day')
            hours = self._GetValueFromStructure(structure, 'hour')
            minutes = self._GetValueFromStructure(structure, 'minute')
            seconds = self._GetValueFromStructure(structure, 'second')

            time_elements_tuple = (self._year_use, month, day, hours, minutes,
                                   seconds)

            try:
                date_time = dfdatetime_time_elements.TimeElements(
                    time_elements_tuple=time_elements_tuple)
                date_time.is_local_time = True
            except ValueError:
                parser_mediator.ProduceExtractionWarning(
                    'invalid date time value: {0!s}'.format(
                        time_elements_tuple))
                return

        plugin = None
        if key == 'syslog_comment':
            event_data = SyslogCommentEventData()
            event_data.body = self._GetValueFromStructure(structure, 'body')
            # TODO: pass line number to offset or remove.
            event_data.offset = 0

        else:
            event_data = SyslogLineEventData()
            event_data.body = self._GetValueFromStructure(structure, 'body')
            event_data.hostname = self._GetValueFromStructure(
                structure, 'hostname')
            # TODO: pass line number to offset or remove.
            event_data.offset = 0
            event_data.pid = self._GetValueFromStructure(structure, 'pid')
            event_data.reporter = self._GetValueFromStructure(
                structure, 'reporter')
            event_data.severity = self._GetValueFromStructure(
                structure, 'severity')

            plugin = self._plugin_by_reporter.get(event_data.reporter, None)
            if plugin:
                attributes = {
                    'body': event_data.body,
                    'hostname': event_data.hostname,
                    'pid': event_data.pid,
                    'reporter': event_data.reporter,
                    'severity': event_data.severity
                }

                try:
                    # TODO: pass event_data instead of attributes.
                    plugin.Process(parser_mediator, date_time, attributes)

                except errors.WrongPlugin:
                    plugin = None

        if not plugin:
            event = time_events.DateTimeValuesEvent(
                date_time,
                definitions.TIME_DESCRIPTION_WRITTEN,
                time_zone=parser_mediator.timezone)
            parser_mediator.ProduceEventWithEventData(event, event_data)
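
Traditional syslog lines carry no year, so the parser keeps a running year (_year_use) and assembles a local-time TimeElements value from the parsed fields. A minimal sketch of that assembly using the same dfdatetime API as the snippet (field values illustrative):

from dfdatetime import time_elements as dfdatetime_time_elements

# (year, month, day, hours, minutes, seconds) as parsed from a line such as
# 'Mar 14 09:26:53 ...' with the tracked year filled in.
time_elements_tuple = (2021, 3, 14, 9, 26, 53)
date_time = dfdatetime_time_elements.TimeElements(
    time_elements_tuple=time_elements_tuple)
date_time.is_local_time = True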
Example #20
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
        """
        values_dict = self._GetValuesFromKey(registry_key)

        # Generate an event for the key.
        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path.replace('\\', '/')
        event_data.values = ' '.join([
            '{0:s}: {1!s}'.format(name, value)
            for name, value in sorted(values_dict.items())
        ]) or None

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        if registry_key.number_of_subkeys == 0:
            error_string = 'Key: {0:s} missing subkeys.'.format(
                registry_key.path)
            parser_mediator.ProduceExtractionWarning(error_string)
            return

        for zone_key in registry_key.GetSubkeys():
            # TODO: these values are stored in the Description value of the
            # zone key. This solution will break on zone values that are larger
            # than 5.
            path = '{0:s}\\{1:s}'.format(registry_key.path,
                                         self._ZONE_NAMES[zone_key.name])

            settings = []

            # TODO: this plugin currently just dumps the values and does not
            # distinguish between what is a feature control or not.
            for value in zone_key.GetValues():
                # Ignore the default value.
                if not value.name:
                    continue

                if value.DataIsString():
                    value_string = value.GetDataAsObject()

                elif value.DataIsInteger():
                    value_integer = value.GetDataAsObject()
                    if value.name in self._KNOWN_PERMISSIONS_VALUE_NAMES:
                        value_string = self._CONTROL_VALUES_PERMISSIONS.get(
                            value_integer, 'UNKNOWN')
                    elif value.name == '1A00':
                        value_string = self._CONTROL_VALUES_1A00.get(
                            value_integer, 'UNKNOWN')
                    elif value.name == '1C00':
                        value_string = self._CONTROL_VALUES_1C00.get(
                            value_integer, 'UNKNOWN')
                    elif value.name == '1E05':
                        value_string = self._CONTROL_VALUES_SAFETY.get(
                            value_integer, 'UNKNOWN')
                    else:
                        value_string = '{0:d}'.format(value_integer)

                else:
                    value_string = '[{0:s}]'.format(value.data_type_string)

                if len(value.name) == 4 and value.name != 'Icon':
                    value_description = self._FEATURE_CONTROLS.get(
                        value.name, 'UNKNOWN')
                else:
                    value_description = self._FEATURE_CONTROLS.get(
                        value.name, '')

                if value_description:
                    feature_control = '[{0:s}] {1:s}: {2:s}'.format(
                        value.name, value_description, value_string)
                else:
                    feature_control = '[{0:s}]: {1:s}'.format(
                        value.name, value_string)

                settings.append(feature_control)

            event_data = MSIEZoneSettingsEventData()
            event_data.key_path = path.replace('\\', '/')
            event_data.settings = ' '.join(sorted(settings))

            event = time_events.DateTimeValuesEvent(
                zone_key.last_written_time,
                definitions.TIME_DESCRIPTION_WRITTEN)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #21
    def ParseCall(self, parser_mediator, query, row, **unused_kwargs):
        """Parses a call.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          query (str): query that created the row.
          row (sqlite3.Row): row resulting from the query.
        """
        query_hash = hash(query)

        guid = self._GetRowValue(query_hash, row, 'guid')
        is_incoming = self._GetRowValue(query_hash, row, 'is_incoming')
        videostatus = self._GetRowValue(query_hash, row, 'videostatus')

        try:
            if guid:
                aux_list = guid.split('-')
                src_aux = aux_list[0]
                dst_aux = aux_list[1]
            else:
                src_aux = 'Unknown [no GUID]'
                dst_aux = 'Unknown [no GUID]'
        except IndexError:
            src_aux = 'Unknown [{0:s}]'.format(guid)
            dst_aux = 'Unknown [{0:s}]'.format(guid)

        if is_incoming == '0':
            user_start_call = True
            source = src_aux

            ip_address = self._GetRowValue(query_hash, row, 'ip_address')
            if ip_address:
                destination = '{0:s} <{1:s}>'.format(dst_aux, ip_address)
            else:
                destination = dst_aux
        else:
            user_start_call = False
            source = src_aux
            destination = dst_aux

        call_identifier = self._GetRowValue(query_hash, row, 'id')

        event_data = SkypeCallEventData()
        event_data.dst_call = destination
        event_data.offset = call_identifier
        event_data.query = query
        event_data.src_call = source
        event_data.user_start_call = user_start_call
        event_data.video_conference = videostatus == '3'

        timestamp = self._GetRowValue(query_hash, row, 'try_call')
        event_data.call_type = 'WAITING'
        date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(date_time, 'Call from Skype')
        parser_mediator.ProduceEventWithEventData(event, event_data)

        try:
            timestamp = self._GetRowValue(query_hash, row, 'accept_call')
            timestamp = int(timestamp)
        except ValueError:
            timestamp = None

        if timestamp:
            event_data.call_type = 'ACCEPTED'
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Call from Skype')
            parser_mediator.ProduceEventWithEventData(event, event_data)

            try:
                call_duration = self._GetRowValue(query_hash, row,
                                                  'call_duration')
                call_duration = int(call_duration)
            except ValueError:
                parser_mediator.ProduceExtractionError(
                    'unable to determine when call: {0!s} was finished.'.
                    format(call_identifier))
                call_duration = None

            if call_duration:
                timestamp += call_duration
                event_data.call_type = 'FINISHED'
                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, 'Call from Skype')
                parser_mediator.ProduceEventWithEventData(event, event_data)
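
Note that the FINISHED timestamp is derived rather than stored: the accepted-call POSIX timestamp plus call_duration in seconds. The same computation in isolation (values illustrative):

from dfdatetime import posix_time as dfdatetime_posix_time

accept_timestamp = 1431222280  # POSIX timestamp from the accept_call column
call_duration = 325            # seconds, from the call_duration column
finished_date_time = dfdatetime_posix_time.PosixTime(
    timestamp=accept_timestamp + call_duration)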
Example #22
    def _ReadEntryInternet(self, parser_mediator, file_object):
        """Extracts the information from an Internet password entry.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): a file-like object.
        """
        record_offset = file_object.tell()
        try:
            record_header_struct = self.RECORD_HEADER_INET.parse_stream(
                file_object)
        except (IOError, construct.FieldError):
            parser_mediator.ProduceExtractionError(
                ('unable to parse record header structure at offset: '
                 '0x{0:08x}').format(record_offset))
            return

        (ssgp_hash, creation_time, last_modification_time, text_description,
         comments, entry_name, account_name) = self._ReadEntryHeader(
             parser_mediator, file_object, record_header_struct.record_header,
             record_offset)

        if not record_header_struct.where:
            where = 'N/A'
            protocol = 'N/A'
            type_protocol = 'N/A'

        else:
            offset = record_offset + record_header_struct.where - 1
            file_object.seek(offset, os.SEEK_SET)
            where = self.TEXT.parse_stream(file_object)
            where = codecs.decode(where, 'utf-8')

            offset = record_offset + record_header_struct.protocol - 1
            file_object.seek(offset, os.SEEK_SET)
            protocol = self.TYPE_TEXT.parse_stream(file_object)
            protocol = codecs.decode(protocol, 'utf-8')

            offset = record_offset + record_header_struct.type - 1
            file_object.seek(offset, os.SEEK_SET)
            type_protocol = self.TEXT.parse_stream(file_object)
            type_protocol = codecs.decode(type_protocol, 'utf-8')
            type_protocol = self._PROTOCOL_TRANSLATION_DICT.get(
                type_protocol, type_protocol)

            if record_header_struct.url:
                offset = record_offset + record_header_struct.url - 1
                file_object.seek(offset, os.SEEK_SET)
                url = self.TEXT.parse_stream(file_object)
                url = codecs.decode(url, 'utf-8')
                where = '{0:s}{1:s}'.format(where, url)

        # Move to the end of the record.
        next_record_offset = (record_offset +
                              record_header_struct.record_header.entry_length)
        file_object.seek(next_record_offset, os.SEEK_SET)

        event_data = KeychainInternetRecordEventData()
        event_data.account_name = account_name
        event_data.comments = comments
        event_data.entry_name = entry_name
        event_data.protocol = protocol
        event_data.ssgp_hash = ssgp_hash
        event_data.text_description = text_description
        event_data.type_protocol = type_protocol
        event_data.where = where

        if creation_time:
            event = time_events.DateTimeValuesEvent(
                creation_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if last_modification_time:
            event = time_events.DateTimeValuesEvent(
                last_modification_time,
                definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #23
    def _ParseFileReferenceKey(self, parser_mediator, file_reference_key):
        """Parses a file reference key (sub key of Root\\File\\%VOLUME%) for events.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          file_reference_key (pyregf.key): file reference key.
        """
        event_data = AMCacheFileEventData()

        try:
            if '0000' in file_reference_key.name:
                # A NTFS file is a combination of MFT entry and sequence number.
                sequence_number, mft_entry = file_reference_key.name.split(
                    '0000')
                mft_entry = int(mft_entry, 16)
                sequence_number = int(sequence_number, 16)
                event_data.file_reference = '{0:d}-{1:d}'.format(
                    mft_entry, sequence_number)
            else:
                # A FAT file is a single number.
                file_reference = int(file_reference_key.name, 16)
                event_data.file_reference = '{0:d}'.format(file_reference)

        except (ValueError, TypeError):
            pass

        for value_name, attribute_name in (
                self._FILE_REFERENCE_KEY_VALUES.items()):
            value = file_reference_key.get_value_by_name(value_name)
            if not value:
                continue

            value_data = self._GetValueDataAsObject(parser_mediator, value)
            if attribute_name == 'sha1' and value_data.startswith('0000'):
                # Strip off the four leading zeros from the SHA-1 hash.
                value_data = value_data[4:]

            setattr(event_data, attribute_name, value_data)

        amcache_time_value = file_reference_key.get_value_by_name(
            self._AMCACHE_ENTRY_WRITE_TIME)
        if amcache_time_value:
            amcache_time = filetime.Filetime(
                amcache_time_value.get_data_as_integer())
            event = time_events.DateTimeValuesEvent(
                amcache_time, definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        creation_time_value = file_reference_key.get_value_by_name(
            self._AMCACHE_FILE_CREATION_TIME)
        if creation_time_value:
            creation_time = filetime.Filetime(
                creation_time_value.get_data_as_integer())
            event = time_events.DateTimeValuesEvent(
                creation_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        modification_time_value = file_reference_key.get_value_by_name(
            self._AMCACHE_FILE_MODIFICATION_TIME)
        if modification_time_value:
            modification_time = filetime.Filetime(
                modification_time_value.get_data_as_integer())
            event = time_events.DateTimeValuesEvent(
                modification_time, definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        compilation_time_value = file_reference_key.get_value_by_name(
            self._AMCACHE_COMPILATION_TIME)
        if compilation_time_value:
            link_time = posix_time.PosixTime(
                compilation_time_value.get_data_as_integer())
            event = time_events.DateTimeValuesEvent(
                link_time, definitions.TIME_DESCRIPTION_CHANGE)
            parser_mediator.ProduceEventWithEventData(event, event_data)
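
For NTFS volumes the file reference key name concatenates the sequence number, a '0000' separator and the MFT entry, all in hexadecimal, which is what the split('0000') above relies on. A sketch with a hypothetical key name:

key_name = '2' + '0000' + '59e5'  # hypothetical: sequence 0x2, entry 0x59e5
sequence_number, mft_entry = key_name.split('0000')
print('{0:d}-{1:d}'.format(int(mft_entry, 16), int(sequence_number, 16)))
# 23013-2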
Example #24
    def ParseCookieRow(self, parser_mediator, query, row, **unused_kwargs):
        """Parses a row from the database.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          query (str): query that created the row.
          row (sqlite3.Row): row.
        """
        query_hash = hash(query)

        cookie_name = self._GetRowValue(query_hash, row, 'name')
        cookie_value = self._GetRowValue(query_hash, row, 'value')
        path = self._GetRowValue(query_hash, row, 'path')

        hostname = self._GetRowValue(query_hash, row, 'domain')
        if hostname.startswith('.'):
            hostname = hostname[1:]

        secure = self._GetRowValue(query_hash, row, 'secure')
        # The WebView database stores the secure flag as an integer,
        # but we represent it as a boolean.
        secure = secure != 0

        if secure:
            scheme = 'https'
        else:
            scheme = 'http'

        url = '{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)

        event_data = WebViewCookieEventData()
        event_data.cookie_name = cookie_name
        event_data.data = cookie_value
        event_data.host = hostname
        event_data.offset = self._GetRowValue(query_hash, row, '_id')
        event_data.path = path
        event_data.query = query
        event_data.secure = secure
        event_data.url = url

        timestamp = self._GetRowValue(query_hash, row, 'expires')
        if timestamp:
            date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
        else:
            date_time = dfdatetime_semantic_time.SemanticTime('Infinity')

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # Go through all cookie plugins to see if any specific parsing is
        # needed.
        for cookie_plugin in self._cookie_plugins:
            try:
                cookie_plugin.UpdateChainAndProcess(parser_mediator,
                                                    cookie_name=cookie_name,
                                                    cookie_data=cookie_value,
                                                    url=url)
            except errors.WrongPlugin:
                pass
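
The expires column holds a Java timestamp: milliseconds since 1970-01-01, wrapped by dfdatetime's JavaTime above. The equivalent standard-library arithmetic:

from datetime import datetime, timedelta

def java_time_to_datetime(timestamp):
    """Converts a Java timestamp (milliseconds since 1970) to datetime."""
    return datetime(1970, 1, 1) + timedelta(milliseconds=timestamp)

print(java_time_to_datetime(1600000000000))
# 2020-09-13 12:26:40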
Example #25
File: officemru.py Project: naaya17/plaso
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
        """
        # TODO: Test other Office versions to make sure this plugin is applicable.
        entries = []
        for registry_value in registry_key.GetValues():
            # Ignore any value not in the form: 'Item [0-9]+'.
            if not registry_value.name or not self._RE_VALUE_NAME.search(
                    registry_value.name):
                continue

            # Ignore any value that is empty or that does not contain a string.
            if not registry_value.data or not registry_value.DataIsString():
                continue

            value_string = registry_value.GetDataAsObject()
            values = self._RE_VALUE_DATA.findall(value_string)

            # values should be a list containing a single tuple of 2 values.
            if len(values) != 1 or len(values[0]) != 2:
                continue

            try:
                timestamp = int(values[0][0], 16)
            except ValueError:
                parser_mediator.ProduceExtractionWarning(
                    ('unable to convert filetime string to an integer for '
                     'value: {0:s}.').format(registry_value.name))
                continue

            event_data = OfficeMRUWindowsRegistryEventData()
            event_data.key_path = registry_key.path.replace('\\', '/')
            # TODO: split value string in individual values.
            event_data.value_string = value_string

            entries.append('{0:s}: {1:s}'.format(registry_value.name,
                                                 value_string))

            if not timestamp:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

            # TODO: determine if this should be last written time.
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_WRITTEN)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        event_data = OfficeMRUListWindowsRegistryEventData()
        event_data.entries = ' '.join(entries) or None
        event_data.key_path = registry_key.path.replace('\\', '/')

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
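
The int(values[0][0], 16) call above converts a FILETIME that Office stores as hexadecimal digits inside the value string. A sketch with a hypothetical value string (the exact pattern lives in _RE_VALUE_DATA):

value_string = '[F00000000][T01D1C40FA1B2C3D4][O00000000]*C:\\docs\\report.docx'
hex_timestamp = value_string.split('[T')[1].split(']')[0]
timestamp = int(hex_timestamp, 16)  # FILETIME as an integer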
Example #26
    def _ParseRecord(self, parser_mediator, file_object, record_offset,
                     record_size):
        """Parses an INFO-2 record.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.
          record_offset (int): record offset.
          record_size (int): record size.
        """
        record_data = file_object.read(record_size)

        ascii_filename = None
        try:
            ascii_filename = self._ASCII_STRING.parse(record_data)

        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError((
                'unable to parse recycler ASCII filename at offset: 0x{0:08x} '
                'with error: {1!s}').format(record_offset, exception))

        try:
            recycler_record_struct = self._RECYCLER_RECORD_STRUCT.parse(
                record_data[self._RECORD_INDEX_OFFSET:])
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                ('unable to parse recycler index record at offset: 0x{0:08x} '
                 'with error: {1!s}').format(
                     record_offset + self._RECORD_INDEX_OFFSET, exception))
            return

        unicode_filename = None
        if record_size == 800:
            unicode_filename = binary.ReadUTF16(
                record_data[self._UNICODE_FILENAME_OFFSET:])

        if ascii_filename and parser_mediator.codepage:
            try:
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage)
            except UnicodeDecodeError:
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage, errors='replace')

        elif ascii_filename:
            ascii_filename = repr(ascii_filename)

        if recycler_record_struct.deletion_time == 0:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=recycler_record_struct.deletion_time)

        event_data = WinRecycleBinEventData()
        event_data.drive_number = recycler_record_struct.drive_number
        event_data.original_filename = unicode_filename or ascii_filename
        event_data.file_size = recycler_record_struct.file_size
        event_data.offset = record_offset
        event_data.record_index = recycler_record_struct.index
        event_data.short_filename = ascii_filename

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_DELETED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
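
The UTF-16 filename read above is a NUL-terminated little-endian string at a fixed offset inside 800-byte records; binary.ReadUTF16 behaves roughly like the following (sample bytes illustrative):

raw_data = 'C:\\Temp\\file.txt'.encode('utf-16-le') + b'\x00' * 10
unicode_filename = raw_data.decode('utf-16-le').split('\x00')[0]
print(unicode_filename)
# C:\Temp\file.txt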
Example #27
    def ParseContainersTable(self,
                             parser_mediator,
                             database=None,
                             table=None,
                             **unused_kwargs):
        """Parses a Containers table.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          database (Optional[pyesedb.file]): ESE database.
          table (Optional[pyesedb.table]): table.

        Raises:
          ValueError: if the database or table value is missing.
        """
        if database is None:
            raise ValueError('Missing database value.')

        if table is None:
            raise ValueError('Missing table value.')

        for esedb_record in table.records:
            if parser_mediator.abort:
                break

            record_values = self._GetRecordValues(parser_mediator, table.name,
                                                  esedb_record)

            event_data = MsieWebCacheContainersEventData()
            event_data.container_identifier = record_values.get(
                'ContainerId', None)
            event_data.directory = record_values.get('Directory', None)
            event_data.name = record_values.get('Name', None)
            event_data.set_identifier = record_values.get('SetId', None)

            timestamp = record_values.get('LastScavengeTime', None)
            if timestamp:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, 'Last Scavenge Time')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            timestamp = record_values.get('LastAccessTime', None)
            if timestamp:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            container_identifier = record_values.get('ContainerId', None)
            container_name = record_values.get('Name', None)

            if not container_identifier or not container_name:
                continue

            if container_name in self._IGNORED_CONTAINER_NAMES:
                parser_mediator.ProduceExtractionWarning(
                    'Skipped container (ContainerId: {0:d}, Name: {1:s})'.
                    format(container_identifier, container_name))
                continue

            table_name = 'Container_{0:d}'.format(container_identifier)
            esedb_table = database.get_table_by_name(table_name)
            if not esedb_table:
                parser_mediator.ProduceExtractionWarning(
                    'Missing table: {0:s}'.format(table_name))
                continue

            self._ParseContainerTable(parser_mediator, esedb_table,
                                      container_name)
Example #28
    def _ParseSubKey(self,
                     parser_mediator,
                     registry_key,
                     parent_path_segments,
                     codepage='cp1252'):
        """Extract event objects from a MRUListEx Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
      parent_path_segments (list[str]): parent shell item path segments.
      codepage (Optional[str]): extended ASCII string codepage.
    """
        entry_numbers = {}
        values_dict = {}
        value_strings = {}

        found_terminator = False
        for index, entry_number in self._ParseMRUListExValue(
                parser_mediator, registry_key):
            if entry_number == 0xffffffff:
                found_terminator = True
                continue

            if found_terminator:
                parser_mediator.ProduceExtractionError(
                    ('Found additional MRUListEx entries after terminator '
                     'in key: {0:s}.').format(registry_key.path))

                # Only create one parser error per terminator.
                found_terminator = False

            path_segment = self._ParseMRUListExEntryValue(parser_mediator,
                                                          registry_key,
                                                          index,
                                                          entry_number,
                                                          values_dict,
                                                          value_strings,
                                                          parent_path_segments,
                                                          codepage=codepage)

            entry_numbers[entry_number] = path_segment

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = values_dict
        event_data.source_append = self._SOURCE_APPEND
        event_data.urls = self.URLS

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        for entry_number, path_segment in iter(entry_numbers.items()):
            sub_key_name = '{0:d}'.format(entry_number)
            sub_key = registry_key.GetSubkeyByName(sub_key_name)
            if not sub_key:
                parser_mediator.ProduceExtractionError(
                    'Missing BagMRU sub key: {0:d} in key: {1:s}.'.format(
                        entry_number, registry_key.path))
                continue

            parent_path_segments.append(path_segment)
            self._ParseSubKey(parser_mediator,
                              sub_key,
                              parent_path_segments,
                              codepage=codepage)
            parent_path_segments.pop()
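
The recursion above reuses a single parent_path_segments list: a segment is pushed before descending into each numbered sub key and popped on the way back, so every branch only ever sees its own ancestors. The pattern in isolation:

def walk(node, parent_path_segments):
    """Depth-first walk sharing one path-segment list, as _ParseSubKey does."""
    for name, child in sorted(node.items()):
        parent_path_segments.append(name)
        print('\\'.join(parent_path_segments))
        walk(child, parent_path_segments)
        parent_path_segments.pop()

walk({'Desktop': {'0': {}, '1': {}}}, [])
# Desktop
# Desktop\0
# Desktop\1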
Example #29
File: opera.py Project: wankyou/plaso
  def _ParseRecord(self, parser_mediator, text_file_object):
    """Parses an Opera global history record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      text_file_object (dfvfs.TextFile): text file.

    Returns:
      bool: True if the record was successfully parsed.
    """
    try:
      title = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode title')
      return False

    if not title:
      return False

    try:
      url = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode url')
      return False

    try:
      timestamp = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode timestamp')
      return False

    try:
      popularity_index = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode popularity index')
      return False

    event_data = OperaGlobalHistoryEventData()

    event_data.url = url.strip()

    title = title.strip()
    if title != event_data.url:
      event_data.title = title

    popularity_index = popularity_index.strip()
    try:
      event_data.popularity_index = int(popularity_index, 10)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'unable to convert popularity index: {0:s}'.format(popularity_index))

    if (event_data.popularity_index is not None and
        event_data.popularity_index < 0):
      event_data.description = 'First and Only Visit'
    else:
      event_data.description = 'Last Visit'

    timestamp = timestamp.strip()
    try:
      timestamp = int(timestamp, 10)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'unable to convert timestamp: {0:s}'.format(timestamp))
      timestamp = None

    if timestamp is None:
      date_time = dfdatetime_semantic_time.InvalidTime()
    else:
      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    return True
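Taken together, _ParseRecord shows that an Opera global history record is just four consecutive text lines: title, URL, a POSIX timestamp, and a popularity index. Below is a minimal standalone sketch of that layout using the standard library instead of dfdatetime; parse_record and the sample data are illustrative, not plaso API.

import datetime
import io

def parse_record(text_file_object):
    """Reads one four-line Opera global history record, or returns None."""
    title = text_file_object.readline().strip()
    if not title:
        return None

    url = text_file_object.readline().strip()
    timestamp = int(text_file_object.readline().strip(), 10)
    popularity_index = int(text_file_object.readline().strip(), 10)

    last_visited = datetime.datetime.fromtimestamp(
        timestamp, tz=datetime.timezone.utc)
    return (title, url, last_visited, popularity_index)

sample = io.StringIO(
    'Example Domain\n'
    'http://example.com/\n'
    '1325376000\n'
    '-1\n')

# ('Example Domain', 'http://example.com/',
#  datetime.datetime(2012, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), -1)
print(parse_record(sample))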
Example #30
    def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
        """Extracts relevant user timestamp entries.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
        """
        if 'name' not in match or 'uid' not in match:
            return

        account = match['name'][0]
        uid = match['uid'][0]

        for policy in match.get('passwordpolicyoptions', []):
            try:
                xml_policy = ElementTree.fromstring(policy)
            except (LookupError, ElementTree.ParseError,
                    expat.ExpatError) as exception:
                logger.error((
                    'Unable to parse XML structure for a user policy, account: '
                    '{0:s} and uid: {1!s}, with error: {2!s}').format(
                        account, uid, exception))
                continue

            # Initialize the dict so a policy without any 'dict' elements does
            # not leave policy_dict unbound (or stale from a prior iteration).
            policy_dict = {}
            for dict_elements in xml_policy.iterfind('dict'):
                key_values = [value.text for value in dict_elements]
                # Convert the flat list to a dict, pairing each even-indexed
                # item (a key) with the odd-indexed item that follows it.
                policy_dict = dict(zip(key_values[0::2], key_values[1::2]))
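                # For example, ['key1', 'value1', 'key2', 'value2'] becomes
                # {'key1': 'value1', 'key2': 'value2'}.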

            time_string = policy_dict.get('passwordLastSetTime', None)
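            # 2001-01-01T00:00:00Z is the Mac OS X (Cocoa) epoch; the plist
            # appears to use it as a placeholder for "never set".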
            if time_string and time_string != '2001-01-01T00:00:00Z':
                try:
                    date_time = dfdatetime_time_elements.TimeElements()
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse password last set time string: {0:s}'.
                        format(time_string))

                shadow_hash_data = match.get('ShadowHashData', None)
                if date_time and isinstance(shadow_hash_data, (list, tuple)):
                    # Extract the password hash information, which is stored
                    # in the ShadowHashData attribute as binary plist data.
                    try:
                        property_list = plistlib.loads(shadow_hash_data[0])
                    except plistlib.InvalidFileException as exception:
                        parser_mediator.ProduceExtractionWarning(
                            'unable to parse ShadowHashData with error: {0!s}'.
                            format(exception))
                        property_list = {}

                    password_hash = 'N/A'

                    salted_hash = property_list.get('SALTED-SHA512-PBKDF2',
                                                    None)
                    if salted_hash:
                        salt_hex_bytes = codecs.encode(salted_hash['salt'],
                                                       'hex')
                        salt_string = codecs.decode(salt_hex_bytes, 'ascii')
                        entropy_hex_bytes = codecs.encode(
                            salted_hash['entropy'], 'hex')
                        entropy_string = codecs.decode(entropy_hex_bytes,
                                                       'ascii')
                        password_hash = '$ml${0:d}${1:s}${2:s}'.format(
                            salted_hash['iterations'], salt_string,
                            entropy_string)
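                        # Yields a macOS shadow hash string of the form:
                        # $ml$<iterations>$<salt hex>$<entropy hex>.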

                    event_data = plist_event.PlistTimeEventData()
                    event_data.desc = (
                        'Last time {0:s} ({1!s}) changed the password: {2!s}'
                    ).format(account, uid, password_hash)
                    event_data.key = 'passwordLastSetTime'
                    event_data.root = self._ROOT

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_string = policy_dict.get('lastLoginTimestamp', None)
            if time_string and time_string != '2001-01-01T00:00:00Z':
                try:
                    date_time = dfdatetime_time_elements.TimeElements()
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse last login time string: {0:s}'.format(
                            time_string))

                if date_time:
                    event_data = plist_event.PlistTimeEventData()
                    event_data.desc = 'Last login from {0:s} ({1!s})'.format(
                        account, uid)
                    event_data.key = 'lastLoginTimestamp'
                    event_data.root = self._ROOT

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_string = policy_dict.get('failedLoginTimestamp', None)
            if time_string and time_string != '2001-01-01T00:00:00Z':
                try:
                    date_time = dfdatetime_time_elements.TimeElements()
                    date_time.CopyFromStringISO8601(time_string)
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse failed login time string: {0:s}'.
                        format(time_string))

                if date_time:
                    event_data = plist_event.PlistTimeEventData()
                    event_data.desc = (
                        'Last failed login from {0:s} ({1!s}) ({2!s} times)'
                    ).format(account, uid,
                             policy_dict.get('failedLoginCount', 0))
                    event_data.key = 'failedLoginTimestamp'
                    event_data.root = self._ROOT

                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
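The three policy timestamps above (passwordLastSetTime, lastLoginTimestamp, failedLoginTimestamp) all repeat the same step: skip the sentinel value, parse the ISO 8601 string, and warn on failure. Below is a minimal sketch of that shared step factored into a helper; the helper name and the plain-datetime stand-in for dfdatetime are illustrative, not plaso API.

import datetime

NEVER_SET = '2001-01-01T00:00:00Z'

def parse_policy_time(time_string):
    """Returns a datetime for a policy timestamp, or None if unset/invalid."""
    if not time_string or time_string == NEVER_SET:
        return None
    try:
        return datetime.datetime.strptime(time_string, '%Y-%m-%dT%H:%M:%SZ')
    except ValueError:
        # The caller would produce an extraction warning here.
        return None

print(parse_policy_time('2019-05-01T12:30:00Z'))  # 2019-05-01 12:30:00
print(parse_policy_time(NEVER_SET))               # None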