Example No. 1
  def GetEntries(
      self, parser_mediator, cookie_data=None, url=None, **kwargs):
    """Extracts event objects from the cookie.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cookie_data (bytes): cookie data.
      url (str): URL or path where the cookie got set.
    """
    fields = cookie_data.split('.')
    number_of_fields = len(fields)

    if number_of_fields != 1:
      parser_mediator.ProduceExtractionError(
          'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
              number_of_fields, self.COOKIE_NAME))
      return

    try:
      # TODO: fix the precision loss here by using dfdatetime.
      last_visit_posix_time = int(fields[0], 10) // 10000000
    except ValueError:
      last_visit_posix_time = None

    if last_visit_posix_time is not None:
      date_time = dfdatetime_posix_time.PosixTime(
          timestamp=last_visit_posix_time)
      timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
    else:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

    event_data = GoogleAnalyticsEventData('utmt')
    event_data.cookie_name = self.COOKIE_NAME
    event_data.url = url

    event = time_events.DateTimeValuesEvent(date_time, timestamp_description)
    parser_mediator.ProduceEventWithEventData(event, event_data)
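A note on the TODO above: the first cookie field appears to hold a count of 100-nanosecond intervals, so dividing by 10,000,000 truncates to whole seconds. A minimal standalone sketch of the fallback pattern used throughout these examples, with a made-up field value (requires the dfdatetime package):

from dfdatetime import posix_time as dfdatetime_posix_time
from dfdatetime import semantic_time as dfdatetime_semantic_time

# Hypothetical cookie field: a count of 100-nanosecond intervals.
field = '13205234567890123'

try:
  # Integer division keeps whole seconds only; the discarded remainder is
  # the precision loss the TODO refers to.
  last_visit_posix_time = int(field, 10) // 10000000
except ValueError:
  last_visit_posix_time = None

if last_visit_posix_time is not None:
  date_time = dfdatetime_posix_time.PosixTime(timestamp=last_visit_posix_time)
else:
  date_time = dfdatetime_semantic_time.SemanticTime('Not set')

print(date_time.CopyToDateTimeString())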
Example No. 2
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Windows Windows Search DB file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """

        self.saveFile(file_object)
        win_ret = self.ParseRecords()
        for ret in win_ret:
            event_data = WinSearchDBExecutionEventData()
            event_data.ID = ret[self.COLUMN2PRINT["ID"]]
            event_data.Name = ret[self.COLUMN2PRINT["Name"]]
            event_data.IType = ret[self.COLUMN2PRINT["IType"]]
            event_data.Owner = ret[self.COLUMN2PRINT["Owner"]]
            event_data.IURL = ret[self.COLUMN2PRINT["IURL"]]
            event_data.IAttr = ret[self.COLUMN2PRINT["IAttr"]]
            event_data.IsFolder = ret[self.COLUMN2PRINT["IsFolder"]]
            event_data.Size = ret[self.COLUMN2PRINT["Size"]]
            event_data.GatherDT = ret[self.COLUMN2PRINT["GatherDT"]]
            event_data.CreateDT = ret[self.COLUMN2PRINT["CreateDT"]]
            event_data.ModifyDT = ret[self.COLUMN2PRINT["ModifyDT"]]
            event_data.AccessDT = ret[self.COLUMN2PRINT["AccessDT"]]
            event_data.SUMMARY = ret[self.COLUMN2PRINT["SUMMARY"]]
            event_data.Title = ret[self.COLUMN2PRINT["Title"]]
            event_data.Subject = ret[self.COLUMN2PRINT["Subject"]]
            event_data.Comment = ret[self.COLUMN2PRINT["Comment"]]
            event_data.Label = ret[self.COLUMN2PRINT["Label"]]
            event_data.Text = ret[self.COLUMN2PRINT["Text"]]
            event_data.APPName = ret[self.COLUMN2PRINT["APPName"]]

            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            desc = definitions.TIME_DESCRIPTION_NOT_A_TIME

            event = time_events.DateTimeValuesEvent(date_time, desc)
            parser_mediator.ProduceEventWithEventData(event, event_data)
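The run of attribute assignments above can be collapsed into a loop, assuming the keys of COLUMN2PRINT match the event data attribute names. A sketch with hypothetical stand-ins for the class and the data:

# Hypothetical stand-ins for self.COLUMN2PRINT and one parsed record.
COLUMN2PRINT = {'ID': 0, 'Name': 1, 'Size': 2}
record = [42, 'document.docx', 1024]

class WinSearchDBExecutionEventData(object):
    """Minimal stand-in for the real event data class."""

event_data = WinSearchDBExecutionEventData()
for attribute_name, column_index in COLUMN2PRINT.items():
    setattr(event_data, attribute_name, record[column_index])

print(event_data.Name)  # document.docx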
Example No. 3
  def Process(self, parser_mediator, root_item=None, **kwargs):
    """Parses an OLECF file.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      root_item (Optional[pyolecf.item]): root item of the OLECF file.

    Raises:
      ValueError: If the root item is not set.
    """
    # This will raise if unhandled keyword arguments are passed.
    super(DefaultOLECFPlugin, self).Process(parser_mediator, **kwargs)

    if not root_item:
      raise ValueError(u'Root item not set.')

    if not self._ParseItem(parser_mediator, root_item):
      # If no event was produced, produce at least one for the root item.
      date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
      event = OLECFItemEvent(
          date_time, eventdata.EventTimestamp.CREATION_TIME, root_item)
      parser_mediator.ProduceEvent(event)
Example No. 4
    def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
        """Extracts relevant Airport entries.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
    """
        if 'RememberedNetworks' not in match:
            return

        for wifi in match['RememberedNetworks']:
            ssid = wifi.get('SSIDString', 'UNKNOWN_SSID')
            security_type = wifi.get('SecurityType', 'UNKNOWN_SECURITY_TYPE')

            event_data = plist_event.PlistTimeEventData()
            event_data.desc = (
                '[WiFi] Connected to network: <{0:s}> using security {1:s}'
            ).format(ssid, security_type)
            event_data.key = 'item'
            event_data.root = '/RememberedNetworks'

            datetime_value = wifi.get('LastConnected', None)
            if datetime_value:
                timestamp = timelib.Timestamp.FromPythonDatetime(
                    datetime_value)
                date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
                    timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_WRITTEN)
            else:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)

            parser_mediator.ProduceEventWithEventData(event, event_data)
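For reference: plaso's timelib timestamps are counts of microseconds since the POSIX epoch, which is what PosixTimeInMicroseconds expects. A minimal sketch:

from dfdatetime import posix_time as dfdatetime_posix_time

# 1,000,000 microseconds after the POSIX epoch: 1970-01-01 00:00:01 UTC.
date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(timestamp=1000000)
print(date_time.CopyToDateTimeString())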
Example No. 5
    def _ParseRecord(self,
                     parser_mediator,
                     record_index,
                     evtx_record,
                     recovered=False):
        """Extract data from a Windows XML EventLog (EVTX) record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      record_index (int): event record index.
      evtx_record (pyevtx.record): event record.
      recovered (Optional[bool]): True if the record was recovered.
    """
        event_data = self._GetEventData(parser_mediator,
                                        record_index,
                                        evtx_record,
                                        recovered=recovered)

        try:
            written_time = evtx_record.get_written_time_as_integer()
        except OverflowError as exception:
            parser_mediator.ProduceExtractionError(
                ('unable to read written time from event record: {0:d} '
                 'with error: {1!s}').format(record_index, exception))

            written_time = None

        if not written_time:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(timestamp=written_time)

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
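The FILETIME value used here counts 100-nanosecond intervals since 1601-01-01 00:00:00 UTC, and 11,644,473,600 seconds separate that epoch from the POSIX epoch. A small standalone sketch of the same fallback, using a value that corresponds exactly to the POSIX epoch:

from dfdatetime import filetime as dfdatetime_filetime
from dfdatetime import semantic_time as dfdatetime_semantic_time

# 1970-01-01 00:00:00 UTC expressed as a FILETIME value.
written_time = 11644473600 * 10000000

if not written_time:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
else:
    date_time = dfdatetime_filetime.Filetime(timestamp=written_time)

print(date_time.CopyToDateTimeString())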
Example No. 6
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        version_value = registry_key.GetValueByName('Version')
        count_subkey = registry_key.GetSubkeyByName('Count')

        if not version_value:
            parser_mediator.ProduceExtractionWarning('missing version value')
            return

        if not version_value.DataIsInteger():
            parser_mediator.ProduceExtractionWarning(
                'unsupported version value data type')
            return

        format_version = version_value.GetDataAsObject()
        if format_version not in (3, 5):
            parser_mediator.ProduceExtractionWarning(
                'unsupported format version: {0:d}'.format(format_version))
            return

        if not count_subkey:
            parser_mediator.ProduceExtractionWarning('missing count subkey')
            return

        userassist_entry_index = 0

        for registry_value in count_subkey.GetValues():
            try:
                # Note that Python 2 codecs.decode() does not support keyword
                # arguments such as encoding='rot-13'.
                value_name = codecs.decode(registry_value.name, 'rot-13')
            except UnicodeEncodeError as exception:
                logger.debug((
                    'Unable to decode UserAssist string: {0:s} with error: {1!s}.\n'
                    'Attempting piecewise decoding.').format(
                        registry_value.name, exception))

                characters = []
                for char in registry_value.name:
                    if ord(char) < 128:
                        try:
                            characters.append(codecs.decode(char, 'rot-13'))
                        except UnicodeEncodeError:
                            characters.append(char)
                    else:
                        characters.append(char)

                value_name = ''.join(characters)

            if format_version == 5:
                path_segments = value_name.split('\\')

                for segment_index, path_segment in enumerate(path_segments):
                    # Remove the { } from the path segment to get the GUID.
                    guid = path_segments[segment_index][1:-1]
                    path_segments[segment_index] = known_folder_ids.PATHS.get(
                        guid, path_segment)

                value_name = '\\'.join(path_segments)
                # Check if we might need to substitute values.
                if '%' in value_name:
                    # TODO: fix missing self._knowledge_base
                    # pylint: disable=no-member
                    environment_variables = (
                        self._knowledge_base.GetEnvironmentVariables())
                    value_name = path_helper.PathHelper.ExpandWindowsPath(
                        value_name, environment_variables)

            if value_name == 'UEME_CTLSESSION':
                continue

            if format_version == 3:
                entry_map = self._GetDataTypeMap('user_assist_entry_v3')
            elif format_version == 5:
                entry_map = self._GetDataTypeMap('user_assist_entry_v5')
            else:
                parser_mediator.ProduceExtractionWarning(
                    'unsupported format version: {0:d}'.format(format_version))
                continue

            if not registry_value.DataIsBinaryData():
                parser_mediator.ProduceExtractionWarning(
                    'unsupported value data type: {0:s}'.format(
                        registry_value.data_type_string))
                continue

            entry_data_size = entry_map.GetByteSize()
            value_data_size = len(registry_value.data)
            if entry_data_size != value_data_size:
                parser_mediator.ProduceExtractionWarning(
                    'unsupported value data size: {0:d}'.format(
                        value_data_size))
                continue

            try:
                user_assist_entry = self._ReadStructureFromByteStream(
                    registry_value.data, 0, entry_map)
            except (ValueError, errors.ParseError) as exception:
                parser_mediator.ProduceExtractionWarning(
                    'unable to parse UserAssist entry value with error: {0!s}'.
                    format(exception))
                continue

            event_data = UserAssistWindowsRegistryEventData()
            event_data.key_path = count_subkey.path
            event_data.number_of_executions = user_assist_entry.number_of_executions
            event_data.value_name = value_name

            if format_version == 3:
                if event_data.number_of_executions > 5:
                    event_data.number_of_executions -= 5

            elif format_version == 5:
                userassist_entry_index += 1

                event_data.application_focus_count = (
                    user_assist_entry.application_focus_count)
                event_data.application_focus_duration = (
                    user_assist_entry.application_focus_duration)
                event_data.entry_index = userassist_entry_index

            timestamp = user_assist_entry.last_execution_time
            if not timestamp:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
            parser_mediator.ProduceEventWithEventData(event, event_data)
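UserAssist value names are ROT-13 encoded. In Python 3, codecs.decode() accepts a str directly for the rot-13 codec, so the session marker skipped above decodes like this:

import codecs

# ROT-13 decoding the marker that the parser skips.
value_name = codecs.decode('HRZR_PGYFRFFVBA', 'rot-13')
print(value_name)  # UEME_CTLSESSION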
Example No. 7
    def GetEntries(self,
                   parser_mediator,
                   cookie_data=None,
                   url=None,
                   **kwargs):
        """Extracts event objects from the cookie.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cookie_data (bytes): cookie data.
      url (str): URL or path where the cookie got set.
    """
        fields = cookie_data.split('.')
        number_of_fields = len(fields)

        if number_of_fields not in (1, 6):
            parser_mediator.ProduceExtractionError(
                'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
                    number_of_fields, self.COOKIE_NAME))
            return

        if number_of_fields == 1:
            domain_hash = None
            visitor_identifier = None
            first_visit_posix_time = None
            previous_visit_posix_time = None

            try:
                # TODO: fix the precision loss here by using dfdatetime.
                last_visit_posix_time = int(fields[0], 10) // 10000000
            except ValueError:
                last_visit_posix_time = None

            number_of_sessions = None

        elif number_of_fields == 6:
            domain_hash = fields[0]
            visitor_identifier = fields[1]

            # TODO: Double check this time is stored in UTC and not local time.
            try:
                first_visit_posix_time = int(fields[2], 10)
            except ValueError:
                first_visit_posix_time = None

            try:
                previous_visit_posix_time = int(fields[3], 10)
            except ValueError:
                previous_visit_posix_time = None

            try:
                last_visit_posix_time = int(fields[4], 10)
            except ValueError:
                last_visit_posix_time = None

            try:
                number_of_sessions = int(fields[5], 10)
            except ValueError:
                number_of_sessions = None

        event_data = GoogleAnalyticsEventData('utma')
        event_data.cookie_name = self.COOKIE_NAME
        event_data.domain_hash = domain_hash
        event_data.sessions = number_of_sessions
        event_data.url = url
        event_data.visitor_id = visitor_identifier

        if first_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=first_visit_posix_time)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Analytics Creation Time')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if previous_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=previous_visit_posix_time)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Analytics Previous Time')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        date_time = None
        if last_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=last_visit_posix_time)
            timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
        elif first_visit_posix_time is None and previous_visit_posix_time is None:
            # If both the first and previous visit times are also not set,
            # produce an event object without a timestamp.
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

        if date_time is not None:
            event = time_events.DateTimeValuesEvent(date_time,
                                                    timestamp_description)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example No. 8
    def ParseFileObject(self,
                        parser_mediator,
                        file_object,
                        display_name=None,
                        **kwargs):
        """Parses a Windows Shortcut (LNK) file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
      display_name (Optional[str]): display name.
    """
        if not display_name:
            display_name = parser_mediator.GetDisplayName()

        lnk_file = pylnk.file()
        lnk_file.set_ascii_codepage(parser_mediator.codepage)

        try:
            lnk_file.open_file_object(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionError(
                u'unable to open file with error: {0!s}'.format(exception))
            return

        link_target = None
        if lnk_file.link_target_identifier_data:
            # TODO: change file_entry.name to display name once it is generated
            # correctly.
            display_name = parser_mediator.GetFilename()
            shell_items_parser = shell_items.ShellItemsParser(display_name)
            shell_items_parser.ParseByteStream(
                parser_mediator,
                lnk_file.link_target_identifier_data,
                codepage=parser_mediator.codepage)

            link_target = shell_items_parser.CopyToPath()

        access_time = lnk_file.get_file_access_time_as_integer()
        if access_time != 0:
            date_time = dfdatetime_filetime.Filetime(timestamp=access_time)
            event = WinLnkLinkEvent(date_time,
                                    definitions.TIME_DESCRIPTION_LAST_ACCESS,
                                    lnk_file, link_target)
            parser_mediator.ProduceEvent(event)

        creation_time = lnk_file.get_file_creation_time_as_integer()
        if creation_time != 0:
            date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
            event = WinLnkLinkEvent(date_time,
                                    definitions.TIME_DESCRIPTION_CREATION,
                                    lnk_file, link_target)
            parser_mediator.ProduceEvent(event)

        modification_time = lnk_file.get_file_modification_time_as_integer()
        if modification_time != 0:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=modification_time)
            event = WinLnkLinkEvent(date_time,
                                    definitions.TIME_DESCRIPTION_MODIFICATION,
                                    lnk_file, link_target)
            parser_mediator.ProduceEvent(event)

        if access_time == 0 and creation_time == 0 and modification_time == 0:
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
            event = WinLnkLinkEvent(date_time,
                                    definitions.TIME_DESCRIPTION_NOT_A_TIME,
                                    lnk_file, link_target)
            parser_mediator.ProduceEvent(event)

        if lnk_file.droid_file_identifier:
            try:
                self._ParseDistributedTrackingIdentifier(
                    parser_mediator, lnk_file.droid_file_identifier,
                    display_name)
            except (TypeError, ValueError) as exception:
                parser_mediator.ProduceExtractionError(
                    u'unable to read droid file identifier with error: {0!s}.'.
                    format(exception))

        if lnk_file.birth_droid_file_identifier:
            try:
                self._ParseDistributedTrackingIdentifier(
                    parser_mediator, lnk_file.birth_droid_file_identifier,
                    display_name)
            except (TypeError, ValueError) as exception:
                parser_mediator.ProduceExtractionError(
                    (u'unable to read birth droid file identifier with error: '
                     u'{0!s}.').format(exception))

        lnk_file.close()
Example No. 9
    def _ParseRecord(self, parser_mediator, file_object, record_offset,
                     record_size):
        """Parses an INFO-2 record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
      record_offset (int): record offset.
      record_size (int): record size.
    """
        record_data = file_object.read(record_size)

        ascii_filename = None
        try:
            ascii_filename = self._ASCII_STRING.parse(record_data)
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError((
                u'unable to parse recycler ASCII filename at offset: 0x{0:08x} '
                u'with error: {1!s}').format(record_offset, exception))

        try:
            recycler_record_struct = self._RECYCLER_RECORD_STRUCT.parse(
                record_data[self._RECORD_INDEX_OFFSET:])
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                (u'unable to parse recycler index record at offset: 0x{0:08x} '
                 u'with error: {1!s}').format(
                     record_offset + self._RECORD_INDEX_OFFSET, exception))
            return

        unicode_filename = None
        if record_size == 800:
            unicode_filename = binary.ReadUTF16(
                record_data[self._UNICODE_FILENAME_OFFSET:])

        if ascii_filename and parser_mediator.codepage:
            try:
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage)
            except UnicodeDecodeError:
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage, errors=u'replace')

        elif ascii_filename:
            ascii_filename = repr(ascii_filename)

        if recycler_record_struct.deletion_time == 0:
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=recycler_record_struct.deletion_time)

        event_data = WinRecycleBinEventData()
        event_data.drive_number = recycler_record_struct.drive_number
        event_data.original_filename = unicode_filename or ascii_filename
        event_data.file_size = recycler_record_struct.file_size
        event_data.offset = record_offset
        event_data.record_index = recycler_record_struct.index
        event_data.short_filename = ascii_filename

        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.DELETED_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example No. 10
  def testCopyToStatTimeTuple(self):
    """Tests the CopyToStatTimeTuple function."""
    semantic_time_object = semantic_time.SemanticTime()

    stat_time_tuple = semantic_time_object.CopyToStatTimeTuple()
    self.assertEqual(stat_time_tuple, (None, None))
Example No. 11
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses an fseventsd file.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the header cannot be parsed.
    """
        page_header_map = self._GetDataTypeMap('dls_page_header')

        try:
            page_header, file_offset = self._ReadStructureFromFileObject(
                file_object, 0, page_header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile(
                'Unable to parse page header with error: {0!s}'.format(
                    exception))

        if page_header.signature not in self._DLS_SIGNATURES:
            raise errors.UnableToParseFile('Invalid file signature')

        current_page_end = page_header.page_size

        file_entry = parser_mediator.GetFileEntry()
        date_time = self._GetParentModificationTime(file_entry)
        # TODO: Change this to use a more representative time definition (time span)
        # when https://github.com/log2timeline/dfdatetime/issues/65 is resolved.
        if date_time:
            timestamp_description = definitions.TIME_DESCRIPTION_RECORDED
        else:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME
        event = time_events.DateTimeValuesEvent(date_time,
                                                timestamp_description)

        file_size = file_object.get_size()
        while file_offset < file_size:
            if file_offset >= current_page_end:
                try:
                    page_header, header_size = self._ParseDLSPageHeader(
                        file_object, file_offset)
                except errors.ParseError as exception:
                    parser_mediator.ProduceExtractionWarning(
                        'Unable to parse page header with error: {0!s}'.format(
                            exception))
                    break

                current_page_end += page_header.page_size
                file_offset += header_size
                continue

            if page_header.signature == self._DLS_V1_SIGNATURE:
                record_map = self._GetDataTypeMap('dls_record_v1')
            else:
                record_map = self._GetDataTypeMap('dls_record_v2')

            try:
                record, record_length = self._ReadStructureFromFileObject(
                    file_object, file_offset, record_map)
                file_offset += record_length
            except (ValueError, errors.ParseError) as exception:
                parser_mediator.ProduceExtractionWarning(
                    'Unable to parse page record with error: {0!s}'.format(
                        exception))
                break

            event_data = self._BuildEventData(record)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example No. 12
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        value = registry_key.GetValueByName(u'AppCompatCache')
        if not value:
            return

        value_data = value.data
        value_data_size = len(value.data)

        format_type = self._CheckSignature(value_data)
        if not format_type:
            parser_mediator.ProduceExtractionError(
                u'Unsupported signature in AppCompatCache key: {0:s}'.format(
                    registry_key.path))
            return

        header_object = self._ParseHeader(format_type, value_data)

        # On Windows Vista and 2008 when the cache is empty it will
        # only consist of the header.
        if value_data_size <= header_object.header_size:
            return

        cached_entry_offset = header_object.header_size
        cached_entry_size = self._DetermineCacheEntrySize(
            format_type, value_data, cached_entry_offset)

        if not cached_entry_size:
            parser_mediator.ProduceExtractionError((
                u'Unsupported cached entry size at offset {0:d} in AppCompatCache '
                u'key: {1:s}').format(cached_entry_offset, registry_key.path))
            return

        cached_entry_index = 0
        while cached_entry_offset < value_data_size:
            cached_entry_object = self._ParseCachedEntry(
                format_type, value_data, cached_entry_offset,
                cached_entry_size)

            event_data = AppCompatCacheEventData()
            event_data.entry_index = cached_entry_index + 1
            event_data.key_path = registry_key.path
            event_data.offset = cached_entry_offset
            event_data.path = cached_entry_object.path

            if cached_entry_object.last_modification_time is not None:
                if not cached_entry_object.last_modification_time:
                    date_time = dfdatetime_semantic_time.SemanticTime(
                        u'Not set')
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=cached_entry_object.last_modification_time)

                # TODO: refactor to file modification event.
                event = time_events.DateTimeValuesEvent(
                    date_time, u'File Last Modification Time')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            if cached_entry_object.last_update_time is not None:
                if not cached_entry_object.last_update_time:
                    date_time = dfdatetime_semantic_time.SemanticTime(
                        u'Not set')
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=cached_entry_object.last_update_time)

                # TODO: refactor to process run event.
                event = time_events.DateTimeValuesEvent(
                    date_time, eventdata.EventTimestamp.LAST_RUNTIME)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            cached_entry_offset += cached_entry_object.cached_entry_size
            cached_entry_index += 1

            if (header_object.number_of_cached_entries != 0
                    and cached_entry_index >=
                    header_object.number_of_cached_entries):
                break
Example No. 13
    def _ParseGUIDTable(self, parser_mediator, cache, database, esedb_table,
                        values_map, event_data_class):
        """Parses a table with a GUID as name.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (ESEDBCache): cache, which contains information about
          the identifiers stored in the SruDbIdMapTable table.
      database (pyesedb.file): ESE database.
      esedb_table (pyesedb.table): table.
      values_map (dict[str, str]): mapping of table columns to event data
          attribute names.
      event_data_class (type): event data class.

    Raises:
      ValueError: if the cache, database or table value is missing.
    """
        if cache is None:
            raise ValueError('Missing cache value.')

        if database is None:
            raise ValueError('Missing database value.')

        if esedb_table is None:
            raise ValueError('Missing table value.')

        identifier_mappings = self._GetIdentifierMappings(
            parser_mediator, cache, database)

        for esedb_record in esedb_table.records:
            if parser_mediator.abort:
                break

            record_values = self._GetRecordValues(
                parser_mediator,
                esedb_table.name,
                esedb_record,
                value_mappings=self._GUID_TABLE_VALUE_MAPPINGS)

            event_data = event_data_class()

            for attribute_name, column_name in values_map.items():
                record_value = record_values.get(column_name, None)
                if attribute_name in ('application', 'user_identifier'):
                    # Human readable versions of AppId and UserId values are stored
                    # in the SruDbIdMapTable table; also referred to as identifier
                    # mapping. Here we look up the numeric identifier stored in the GUID
                    # table in SruDbIdMapTable.
                    record_value = identifier_mappings.get(
                        record_value, record_value)

                setattr(event_data, attribute_name, record_value)

            timestamp = record_values.get('TimeStamp')
            if timestamp:
                date_time = dfdatetime_ole_automation_date.OLEAutomationDate(
                    timestamp=timestamp)
                timestamp_description = definitions.TIME_DESCRIPTION_SAMPLE
            else:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
                timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

            event = time_events.DateTimeValuesEvent(date_time,
                                                    timestamp_description)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            timestamp = record_values.get('ConnectStartTime')
            if timestamp:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_FIRST_CONNECTED)
                parser_mediator.ProduceEventWithEventData(event, event_data)
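The TimeStamp column parsed above is an OLE Automation date: a floating-point count of days since 1899-12-30 00:00:00, with the fractional part encoding the time of day. A standalone sketch (25569 days after that epoch is exactly the POSIX epoch):

from dfdatetime import ole_automation_date as dfdatetime_ole_automation_date

# 25569.5 days after 1899-12-30: 1970-01-01 12:00:00 UTC.
date_time = dfdatetime_ole_automation_date.OLEAutomationDate(timestamp=25569.5)
print(date_time.CopyToDateTimeString())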
Example No. 14
    def ParseCookieRow(self, parser_mediator, query, row, **unused_kwargs):
        """Parses a row from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row.
    """
        query_hash = hash(query)

        cookie_name = self._GetRowValue(query_hash, row, 'name')
        cookie_value = self._GetRowValue(query_hash, row, 'value')
        path = self._GetRowValue(query_hash, row, 'path')

        hostname = self._GetRowValue(query_hash, row, 'domain')
        if hostname.startswith('.'):
            hostname = hostname[1:]

        secure = self._GetRowValue(query_hash, row, 'secure')
        # The WebView database stores the secure flag as an integer type,
        # but we represent it as a boolean.
        secure = secure != 0

        if secure:
            scheme = 'https'
        else:
            scheme = 'http'

        url = '{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)

        event_data = WebViewCookieEventData()
        event_data.cookie_name = cookie_name
        event_data.data = cookie_value
        event_data.host = hostname
        event_data.offset = self._GetRowValue(query_hash, row, '_id')
        event_data.path = path
        event_data.query = query
        event_data.secure = secure
        event_data.url = url

        timestamp = self._GetRowValue(query_hash, row, 'expires')
        if timestamp:
            date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
        else:
            date_time = dfdatetime_semantic_time.SemanticTime('Infinity')

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # Go through all cookie plugins to see if any specific parsing
        # is needed.
        for cookie_plugin in self._cookie_plugins:
            try:
                cookie_plugin.UpdateChainAndProcess(parser_mediator,
                                                    cookie_name=cookie_name,
                                                    cookie_data=cookie_value,
                                                    url=url)
            except errors.WrongPlugin:
                pass
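The WebView expires column is a Java timestamp: milliseconds since the POSIX epoch. A minimal sketch of the branch above, including the 'Infinity' semantic fallback used when no expiry is set:

from dfdatetime import java_time as dfdatetime_java_time
from dfdatetime import semantic_time as dfdatetime_semantic_time

timestamp = 86400000  # in milliseconds: 1970-01-02 00:00:00 UTC

if timestamp:
    date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
else:
    date_time = dfdatetime_semantic_time.SemanticTime('Infinity')

print(date_time.CopyToDateTimeString())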
Example No. 15
  def GetEntries(
      self, parser_mediator, cookie_data=None, url=None, **kwargs):
    """Extracts event objects from the cookie.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cookie_data (str): cookie data.
      url (str): URL or path where the cookie got set.
    """
    fields = cookie_data.split('.')
    number_of_fields = len(fields)

    if number_of_fields > 5:
      variables = '.'.join(fields[4:])
      fields = fields[0:4]
      fields.append(variables)
      number_of_fields = len(fields)

    if number_of_fields not in (1, 5):
      parser_mediator.ProduceExtractionWarning(
          'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
              number_of_fields, self.COOKIE_NAME))
      return

    if number_of_fields == 1:
      domain_hash = None

      try:
        # TODO: fix the precision loss here by using dfdatetime.
        last_visit_posix_time = int(fields[0], 10) // 10000000
      except ValueError:
        last_visit_posix_time = None

      number_of_sessions = None
      number_of_sources = None
      extra_attributes = {}

    elif number_of_fields == 5:
      domain_hash = fields[0]

      try:
        last_visit_posix_time = int(fields[1], 10)
      except ValueError:
        last_visit_posix_time = None

      try:
        number_of_sessions = int(fields[2], 10)
      except ValueError:
        number_of_sessions = None

      try:
        number_of_sources = int(fields[3], 10)
      except ValueError:
        number_of_sources = None

      extra_variables = fields[4].split('|')

      extra_attributes = {}
      for variable in extra_variables:
        key, _, value = variable.partition('=')
        extra_attributes[key] = urlparse.unquote(value)

    if last_visit_posix_time is not None:
      date_time = dfdatetime_posix_time.PosixTime(
          timestamp=last_visit_posix_time)
      timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
    else:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

    event_data = GoogleAnalyticsEventData('utmz')
    event_data.cookie_name = self.COOKIE_NAME
    event_data.domain_hash = domain_hash
    event_data.sessions = number_of_sessions
    event_data.sources = number_of_sources
    event_data.url = url

    for key, value in extra_attributes.items():
      setattr(event_data, key, value)

    event = time_events.DateTimeValuesEvent(date_time, timestamp_description)
    parser_mediator.ProduceEventWithEventData(event, event_data)
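The fifth __utmz field packs campaign variables as percent-encoded key=value pairs separated by '|'. A standalone sketch of the attribute loop above, using Python 3's urllib (the parser imports this as urlparse for Python 2 compatibility) and a made-up field value:

from urllib.parse import unquote

# Hypothetical fields[4] value of a __utmz cookie.
extra_variables = 'utmcsr=google|utmccn=(organic)|utmcmd=organic'.split('|')

extra_attributes = {}
for variable in extra_variables:
  key, _, value = variable.partition('=')
  extra_attributes[key] = unquote(value)

print(extra_attributes['utmcsr'])  # google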
Example No. 16
  def testCopyToDateTimeString(self):
    """Tests the CopyToDateTimeString function."""
    semantic_time_object = semantic_time.SemanticTime(string='Never')

    date_time_string = semantic_time_object.CopyToDateTimeString()
    self.assertEqual(date_time_string, 'Never')
Example No. 17
  def testCopyToDateTimeStringISO8601(self):
    """Tests the CopyToDateTimeStringISO8601 function."""
    semantic_time_object = semantic_time.SemanticTime(string='Never')

    date_time_string = semantic_time_object.CopyToDateTimeStringISO8601()
    self.assertIsNone(date_time_string)
Example No. 18
    def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
        """Parses an USN change journal.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      usn_change_journal (pyfsntsfs.usn_change_journal): USN change journal.

    Raises:
      ParseError: if an USN change journal record cannot be parsed.
    """
        if not usn_change_journal:
            return

        usn_record_map = self._GetDataTypeMap('usn_record_v2')

        usn_record_data = usn_change_journal.read_usn_record()
        while usn_record_data:
            current_offset = usn_change_journal.get_offset()

            try:
                usn_record = self._ReadStructureFromByteStream(
                    usn_record_data, current_offset, usn_record_map)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError((
                    'Unable to parse USN record at offset: 0x{0:08x} with error: '
                    '{1!s}').format(current_offset, exception))

            # Per MSDN we need to use name offset for forward compatibility.
            name_offset = usn_record.name_offset - 60
            utf16_stream = usn_record.name[name_offset:usn_record.name_size]

            try:
                name_string = utf16_stream.decode('utf-16-le')
            except (UnicodeDecodeError, UnicodeEncodeError) as exception:
                name_string = utf16_stream.decode('utf-16-le',
                                                  errors='replace')
                parser_mediator.ProduceExtractionWarning((
                    'unable to decode USN record name string with error: '
                    '{0:s}. Characters that cannot be decoded will be replaced '
                    'with "?" or "\\ufffd".').format(exception))

            event_data = NTFSUSNChangeEventData()
            event_data.file_attribute_flags = usn_record.file_attribute_flags
            event_data.file_reference = usn_record.file_reference
            event_data.filename = name_string
            event_data.offset = current_offset
            event_data.parent_file_reference = usn_record.parent_file_reference
            event_data.update_reason_flags = usn_record.update_reason_flags
            event_data.update_sequence_number = usn_record.update_sequence_number
            event_data.update_source_flags = usn_record.update_source_flags

            if not usn_record.update_date_time:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=usn_record.update_date_time)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            usn_record_data = usn_change_journal.read_usn_record()
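The name string handling above relies on bytes.decode() with errors='replace' as a fallback. A small sketch showing both paths, using a hypothetical UTF-16-LE stream that ends in a lone high surrogate:

utf16_stream = b'a\x00b\x00c\x00'
print(utf16_stream.decode('utf-16-le'))  # abc

broken_stream = b'a\x00\x00\xd8'  # 'a' followed by a lone high surrogate
try:
    broken_stream.decode('utf-16-le')
except UnicodeDecodeError:
    # The undecodable code unit is replaced with U+FFFD.
    print(broken_stream.decode('utf-16-le', errors='replace'))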
Example No. 19
  def testGetDate(self):
    """Tests the GetDate function."""
    semantic_time_object = semantic_time.SemanticTime()

    date_tuple = semantic_time_object.GetDate()
    self.assertEqual(date_tuple, (None, None, None))
Example No. 20
    def _ParseCookieRecord(self, parser_mediator, page_data, page_offset):
        """Parses a cookie record

    Args:
      parser_mediator (ParserMediator): parser mediator.
      page_data (bytes): page data.
      page_offset (int): offset of the cookie record relative to the start
          of the page.
    """
        try:
            cookie = self._COOKIE_RECORD.parse(page_data[page_offset:])
        except construct.FieldError:
            message = 'Unable to read cookie record at offset: {0:d}'.format(
                page_offset)
            parser_mediator.ProduceExtractionError(message)
            return

        # Each value's data runs from its own offset to the start of the next
        # offset. Since the offsets are not always stored in the same order,
        # we sort them first to determine each value's extent.
        offset_dict = {
            cookie.url_offset: 'url',
            cookie.name_offset: 'name',
            cookie.value_offset: 'value',
            cookie.path_offset: 'path'
        }

        offsets = sorted(offset_dict.keys())
        offsets.append(cookie.size + page_offset)

        # TODO: Find a better approach to parsing the data than this.
        data_dict = {}
        for current_offset in range(0, len(offsets) - 1):
            # Get the current offset and the offset for the next entry.
            start, end = offsets[current_offset:current_offset + 2]
            value = offset_dict.get(offsets[current_offset])
            # Read the data.
            data_all = page_data[start + page_offset:end + page_offset]
            data, _, _ = data_all.partition(b'\x00')
            data_dict[value] = data

        event_data = SafariBinaryCookieEventData()
        event_data.cookie_name = data_dict.get('name')
        event_data.cookie_value = data_dict.get('value')
        event_data.flags = cookie.flags
        event_data.path = data_dict.get('path')
        event_data.url = data_dict.get('url')

        if cookie.creation_date:
            date_time = dfdatetime_cocoa_time.CocoaTime(
                timestamp=cookie.creation_date)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if cookie.expiration_date:
            date_time = dfdatetime_cocoa_time.CocoaTime(
                timestamp=cookie.expiration_date)
        else:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        for plugin in self._cookie_plugins:
            if parser_mediator.abort:
                break

            if event_data.cookie_name != plugin.COOKIE_NAME:
                continue

            try:
                plugin.UpdateChainAndProcess(
                    parser_mediator,
                    cookie_name=event_data.cookie_name,
                    cookie_data=event_data.cookie_value,
                    url=event_data.url)

            except Exception as exception:  # pylint: disable=broad-except
                parser_mediator.ProduceExtractionError(
                    'plugin: {0:s} unable to parse cookie with error: {1!s}'.
                    format(plugin.NAME, exception))
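Safari's binary cookies store their times as Cocoa timestamps: seconds since 2001-01-01 00:00:00 UTC. A minimal sketch:

from dfdatetime import cocoa_time as dfdatetime_cocoa_time

# 0 seconds after the Cocoa epoch: 2001-01-01 00:00:00 UTC.
date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=0)
print(date_time.CopyToDateTimeString())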
Example No. 21
    def _ParseUrl(self,
                  parser_mediator,
                  format_version,
                  cache_directories,
                  msiecf_item,
                  recovered=False):
        """Extract data from a MSIE Cache Files (MSIECF) URL item.

    Every item is stored as an event object, one for each timestamp.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      format_version (str): MSIECF format version.
      cache_directories (list[str]): cache directory names.
      msiecf_item (pymsiecf.url): MSIECF URL item.
      recovered (Optional[bool]): True if the item was recovered.
    """
        # The secondary time can be stored in either UTC or local time;
        # this depends on what the index.dat file is used for. Either the
        # file path or the location string can be used to distinguish
        # between the different types of files.
        timestamp = msiecf_item.get_primary_time_as_integer()
        if not timestamp:
            primary_date_time = dfdatetime_semantic_time.SemanticTime(
                'Not set')
        else:
            primary_date_time = dfdatetime_filetime.Filetime(
                timestamp=timestamp)
        primary_date_time_description = 'Primary Time'

        timestamp = msiecf_item.get_secondary_time_as_integer()
        secondary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        secondary_date_time_description = 'Secondary Time'

        if msiecf_item.type:
            if msiecf_item.type == 'cache':
                primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_MODIFICATION)

            elif msiecf_item.type == 'cookie':
                primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_MODIFICATION)

            elif msiecf_item.type == 'history':
                primary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)

            elif msiecf_item.type == 'history-daily':
                primary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                # The secondary_date_time is in local time; normalize it to UTC.
                secondary_date_time.is_local_time = True

            elif msiecf_item.type == 'history-weekly':
                primary_date_time_description = definitions.TIME_DESCRIPTION_CREATION
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                # The secondary_date_time is in local time; normalize it to UTC.
                secondary_date_time.is_local_time = True

        http_headers = ''
        if msiecf_item.type and msiecf_item.data:
            if msiecf_item.type == 'cache':
                if msiecf_item.data[:4] == b'HTTP':
                    # Make sure the HTTP headers are ASCII encoded.
                    # TODO: determine correct encoding currently indications that
                    # this could be the system narrow string codepage.
                    try:
                        http_headers = msiecf_item.data[:-1].decode('ascii')
                    except UnicodeDecodeError:
                        parser_mediator.ProduceExtractionError((
                            'unable to decode HTTP headers of URL record at offset: '
                            '0x{0:08x}. Characters that cannot be decoded will be '
                            'replaced with "?" or "\\ufffd".').format(
                                msiecf_item.offset))
                        http_headers = msiecf_item.data[:-1].decode(
                            'ascii', errors='replace')

            # TODO: parse data of other URL item types, like history, which
            # require OLE VT parsing.

        event_data = MSIECFURLEventData()
        event_data.cached_filename = msiecf_item.filename
        event_data.cached_file_size = msiecf_item.cached_file_size
        event_data.cache_directory_index = msiecf_item.cache_directory_index
        event_data.http_headers = http_headers
        event_data.number_of_hits = msiecf_item.number_of_hits
        event_data.offset = msiecf_item.offset
        event_data.recovered = recovered
        event_data.url = msiecf_item.location

        if (event_data.cache_directory_index >= 0
                and event_data.cache_directory_index < len(cache_directories)):
            event_data.cache_directory_name = (
                cache_directories[event_data.cache_directory_index])

        event = time_events.DateTimeValuesEvent(primary_date_time,
                                                primary_date_time_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        if secondary_date_time.timestamp != 0:
            event = time_events.DateTimeValuesEvent(
                secondary_date_time,
                secondary_date_time_description,
                time_zone=parser_mediator.timezone)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        expiration_timestamp = msiecf_item.get_expiration_time_as_integer()
        if expiration_timestamp != 0:
            # The expiration time in MSIECF version 4.7 is stored as a FILETIME value
            # in version 5.2 it is stored as a FAT date time value.
            # Since the as_integer function returns the raw integer value we need to
            # apply the right conversion here.
            if format_version == '4.7':
                if expiration_timestamp == 0x7fffffffffffffff:
                    expiration_date_time = dfdatetime_semantic_time.SemanticTime(
                        'Never')
                else:
                    expiration_date_time = dfdatetime_filetime.Filetime(
                        timestamp=expiration_timestamp)
            else:
                if expiration_timestamp == 0xffffffff:
                    expiration_date_time = dfdatetime_semantic_time.SemanticTime(
                        'Never')
                else:
                    expiration_date_time = dfdatetime_fat_date_time.FATDateTime(
                        fat_date_time=expiration_timestamp)

            event = time_events.DateTimeValuesEvent(
                expiration_date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        last_checked_timestamp = msiecf_item.get_last_checked_time_as_integer()
        if last_checked_timestamp != 0:
            last_checked_date_time = dfdatetime_fat_date_time.FATDateTime(
                fat_date_time=last_checked_timestamp)

            event = time_events.DateTimeValuesEvent(
                last_checked_date_time,
                definitions.TIME_DESCRIPTION_LAST_CHECKED)
            parser_mediator.ProduceEventWithEventData(event, event_data)
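The expiration handling above maps version-specific sentinel values to a semantic 'Never' rather than a real timestamp. A condensed standalone sketch of the MSIECF 4.7 (FILETIME) branch:

from dfdatetime import filetime as dfdatetime_filetime
from dfdatetime import semantic_time as dfdatetime_semantic_time

# MSIECF 4.7 uses this FILETIME sentinel to mean "never expires".
expiration_timestamp = 0x7fffffffffffffff

if expiration_timestamp == 0x7fffffffffffffff:
    expiration_date_time = dfdatetime_semantic_time.SemanticTime('Never')
else:
    expiration_date_time = dfdatetime_filetime.Filetime(
        timestamp=expiration_timestamp)

print(expiration_date_time.CopyToDateTimeString())  # Never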
Example No. 22
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        # TODO: Test other Office versions to make sure this plugin is applicable.
        values_dict = {}
        for registry_value in registry_key.GetValues():
            # Ignore any value not in the form: 'Item [0-9]+'.
            if not registry_value.name or not self._RE_VALUE_NAME.search(
                    registry_value.name):
                continue

            # Ignore any value that is empty or that does not contain a string.
            if not registry_value.data or not registry_value.DataIsString():
                continue

            value_string = registry_value.GetDataAsObject()
            values = self._RE_VALUE_DATA.findall(value_string)

            # The result is a list that should contain a single 2-value tuple.
            if len(values) != 1 or len(values[0]) != 2:
                continue

            try:
                timestamp = int(values[0][0], 16)
            except ValueError:
                parser_mediator.ProduceExtractionWarning(
                    ('unable to convert filetime string to an integer for '
                     'value: {0:s}.').format(registry_value.name))
                continue

            event_data = OfficeMRUWindowsRegistryEventData()
            event_data.key_path = registry_key.path
            event_data.offset = registry_value.offset
            # TODO: split value string in individual values.
            event_data.value_string = value_string

            values_dict[registry_value.name] = value_string

            if not timestamp:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

            # TODO: determine if this should be last written time.
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_WRITTEN)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = values_dict
        event_data.source_append = self._SOURCE_APPEND

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
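The snippet above filters on _RE_VALUE_NAME and _RE_VALUE_DATA without showing them. A plausible sketch, inferred from the comments and the int(..., 16) conversion (the exact patterns in the plugin may differ):

import re

# Value names look like 'Item 1', 'Item 2', ...
_RE_VALUE_NAME = re.compile(r'^Item [0-9]+$', re.I)

# Value data pairs a FILETIME encoded as hexadecimal with a path, for
# example '[F00000000][T01CD0146EA1EADB0][O00000000]*C:\\report.docx'
# (hypothetical); findall() then yields one 2-value tuple per match.
_RE_VALUE_DATA = re.compile(
    r'\[F00000000\]\[T([0-9A-F]+)\](?:\[O[0-9A-F]+\])?\*(.*)')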
Example No. 23
  def _ParseRecord(self, parser_mediator, page_data, record_offset):
    """Parses a record from the page data.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      page_data (bytes): page data.
      record_offset (int): offset of the record relative to the start
          of the page.

    Raises:
      ParseError: when the record cannot be parsed.
    """
    record_header_map = self._GetDataTypeMap('binarycookies_record_header')

    try:
      record_header = self._ReadStructureFromByteStream(
          page_data[record_offset:], record_offset, record_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map record header data at offset: 0x{0:08x} with error: '
          '{1!s}').format(record_offset, exception))

    event_data = SafariBinaryCookieEventData()
    event_data.flags = record_header.flags

    if record_header.url_offset:
      data_offset = record_offset + record_header.url_offset
      event_data.url = self._ParseCString(page_data, data_offset)

    if record_header.name_offset:
      data_offset = record_offset + record_header.name_offset
      event_data.cookie_name = self._ParseCString(page_data, data_offset)

    if record_header.path_offset:
      data_offset = record_offset + record_header.path_offset
      event_data.path = self._ParseCString(page_data, data_offset)

    if record_header.value_offset:
      data_offset = record_offset + record_header.value_offset
      event_data.cookie_value = self._ParseCString(page_data, data_offset)

    if record_header.creation_time:
      date_time = dfdatetime_cocoa_time.CocoaTime(
          timestamp=record_header.creation_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if record_header.expiration_time:
      date_time = dfdatetime_cocoa_time.CocoaTime(
          timestamp=record_header.expiration_time)
    else:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    for plugin in self._cookie_plugins:
      if parser_mediator.abort:
        break

      if event_data.cookie_name != plugin.COOKIE_NAME:
        continue

      try:
        plugin.UpdateChainAndProcess(
            parser_mediator, cookie_name=event_data.cookie_name,
            cookie_data=event_data.cookie_value, url=event_data.url)

      except Exception as exception:  # pylint: disable=broad-except
        parser_mediator.ProduceExtractionError(
            'plugin: {0:s} unable to parse cookie with error: {1!s}'.format(
                plugin.NAME, exception))
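Safari's binary cookie records store creation and expiration times as Cocoa timestamps: seconds since 2001-01-01 00:00:00 UTC, which dfdatetime's CocoaTime wraps. An equivalent standalone conversion:

from datetime import datetime, timedelta, timezone

# The Cocoa (Core Foundation) epoch is 2001-01-01 00:00:00 UTC.
_COCOA_EPOCH = datetime(2001, 1, 1, tzinfo=timezone.utc)

def cocoa_time_to_datetime(cocoa_timestamp):
  """Converts seconds since the Cocoa epoch to a UTC datetime."""
  return _COCOA_EPOCH + timedelta(seconds=cocoa_timestamp)

# cocoa_time_to_datetime(0.0) == datetime(2001, 1, 1, tzinfo=timezone.utc)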
Example No. 24
    def ParseDestList(self, parser_mediator, olecf_item):
        """Parses the DestList OLECF item.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      olecf_item (pyolecf.item): OLECF item.

    Raises:
      UnableToParseFile: if the DestList cannot be parsed.
    """
        header_map = self._GetDataTypeMap('dest_list_header')

        try:
            header, entry_offset = self._ReadStructureFromFileObject(
                olecf_item, 0, header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile(
                'Unable to parse DestList header with error: {0!s}'.format(
                    exception))

        if header.format_version == 1:
            entry_map = self._GetDataTypeMap('dest_list_entry_v1')
        elif header.format_version in (3, 4):
            entry_map = self._GetDataTypeMap('dest_list_entry_v3')
        else:
            parser_mediator.ProduceExtractionError(
                'unsupported format version: {0:d}.'.format(
                    header.format_version))
            return

        while entry_offset < olecf_item.size:
            try:
                entry, entry_data_size = self._ReadStructureFromFileObject(
                    olecf_item, entry_offset, entry_map)
            except (ValueError, errors.ParseError) as exception:
                raise errors.UnableToParseFile(
                    'Unable to parse DestList entry with error: {0!s}'.format(
                        exception))

            display_name = 'DestList entry at offset: 0x{0:08x}'.format(
                entry_offset)

            try:
                droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
                    parser_mediator, entry.droid_volume_identifier,
                    display_name)

            except (TypeError, ValueError) as exception:
                droid_volume_identifier = ''
                parser_mediator.ProduceExtractionError(
                    'unable to read droid volume identifier with error: '
                    '{0!s}'.format(exception))

            try:
                droid_file_identifier = self._ParseDistributedTrackingIdentifier(
                    parser_mediator, entry.droid_file_identifier, display_name)

            except (TypeError, ValueError) as exception:
                droid_file_identifier = ''
                parser_mediator.ProduceExtractionError(
                    'unable to read droid file identifier with error: '
                    '{0!s}'.format(exception))

            try:
                birth_droid_volume_identifier = (
                    self._ParseDistributedTrackingIdentifier(
                        parser_mediator, entry.birth_droid_volume_identifier,
                        display_name))

            except (TypeError, ValueError) as exception:
                birth_droid_volume_identifier = ''
                parser_mediator.ProduceExtractionError((
                    'unable to read birth droid volume identifier with error: '
                    '{0!s}').format(exception))

            try:
                birth_droid_file_identifier = (
                    self._ParseDistributedTrackingIdentifier(
                        parser_mediator, entry.birth_droid_file_identifier,
                        display_name))

            except (TypeError, ValueError) as exception:
                birth_droid_file_identifier = ''
                parser_mediator.ProduceExtractionError(
                    ('unable to read birth droid file identifier with error: '
                     '{0!s}').format(exception))

            if entry.last_modification_time == 0:
                date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            else:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=entry.last_modification_time)

            event_data = AutomaticDestinationsDestListEntryEventData()
            event_data.birth_droid_file_identifier = birth_droid_file_identifier
            event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
            event_data.droid_file_identifier = droid_file_identifier
            event_data.droid_volume_identifier = droid_volume_identifier
            event_data.entry_number = entry.entry_number
            event_data.hostname = entry.hostname.rstrip('\x00')
            event_data.offset = entry_offset
            event_data.path = entry.path.rstrip('\x00')
            event_data.pin_status = entry.pin_status

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            entry_offset += entry_data_size
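The last_modification_time handled above is a FILETIME, a 64-bit count of 100-nanosecond intervals since 1601-01-01 UTC, which is why 0 is mapped to the 'Not set' semantic time. A standalone sketch of the conversion dfdatetime's Filetime performs:

from datetime import datetime, timedelta, timezone

_FILETIME_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)

def filetime_to_datetime(filetime):
  """Converts a FILETIME (100ns intervals since 1601) to a UTC datetime."""
  # Integer division to microseconds drops the sub-microsecond remainder.
  return _FILETIME_EPOCH + timedelta(microseconds=filetime // 10)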
Example No. 25
    def GetEntries(
            self, parser_mediator, cookie_data=None, url=None, **kwargs):
        """Extracts event objects from the cookie.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cookie_data (bytes): cookie data.
      url (str): URL or path where the cookie got set.
    """
        fields = cookie_data.split('.')
        number_of_fields = len(fields)

        if number_of_fields > 5:
            variables = '.'.join(fields[4:])
            fields = fields[0:4]
            fields.append(variables)
            number_of_fields = len(fields)

        if number_of_fields not in (1, 5):
            parser_mediator.ProduceExtractionError(
                'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
                    number_of_fields, self.COOKIE_NAME))
            return

        if number_of_fields == 1:
            domain_hash = None

            try:
                # TODO: fix that we're losing precision here use dfdatetime.
                last_visit_posix_time = int(fields[0], 10) / 10000000
            except ValueError:
                last_visit_posix_time = None

            number_of_sessions = None
            number_of_sources = None
            extra_attributes = {}

        elif number_of_fields == 5:
            domain_hash = fields[0]

            try:
                last_visit_posix_time = int(fields[1], 10)
            except ValueError:
                last_visit_posix_time = None

            try:
                number_of_sessions = int(fields[2], 10)
            except ValueError:
                number_of_sessions = None

            try:
                number_of_sources = int(fields[3], 10)
            except ValueError:
                number_of_sources = None

            extra_variables = fields[4].split('|')

            extra_attributes = {}
            for variable in extra_variables:
                key, _, value = variable.partition('=')

                # Cookies can have a variety of different encodings, usually
                # ASCII or UTF-8, and values may additionally be URL encoded.
                # URL decoding is only reliable on 7-bit data, so convert the
                # value to ASCII bytes first.
                try:
                    ascii_value = value.encode('ascii')
                except UnicodeEncodeError:
                    ascii_value = value.encode('ascii', errors='replace')
                    parser_mediator.ProduceExtractionError(
                        'Cookie contains non 7-bit ASCII characters, which '
                        'have been replaced with a "?".')

                # unquote_to_bytes() URL-decodes bytes to bytes.
                utf_stream = urllib.parse.unquote_to_bytes(ascii_value)

                try:
                    value_line = utf_stream.decode('utf-8')
                except UnicodeDecodeError:
                    value_line = utf_stream.decode('utf-8', errors='replace')
                    parser_mediator.ProduceExtractionError(
                        'Cookie value did not decode to Unicode string. Non UTF-8 '
                        'characters have been replaced.')

                extra_attributes[key] = value_line

        if last_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=last_visit_posix_time)
            timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
        else:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

        event_data = GoogleAnalyticsEventData('utmz')
        event_data.cookie_name = self.COOKIE_NAME
        event_data.domain_hash = domain_hash
        event_data.sessions = number_of_sessions
        event_data.sources = number_of_sources
        event_data.url = url

        for key, value in extra_attributes.items():
            setattr(event_data, key, value)

        event = time_events.DateTimeValuesEvent(
            date_time, timestamp_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)
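To illustrate the field handling above with a hypothetical __utmz value: the campaign variables in the fifth field may themselves contain dots, which is what the rejoin of fields[4:] compensates for.

# Hypothetical __utmz cookie data; the campaign source contains a dot.
cookie_data = (
    '137167603.1386636510.1.1.'
    'utmcsr=example.com|utmccn=(referral)|utmcmd=referral')

fields = cookie_data.split('.')
# 6 fields, because 'example.com' was split too; rejoining everything
# from index 4 onward restores the single variables field.
if len(fields) > 5:
  fields = fields[:4] + ['.'.join(fields[4:])]

assert len(fields) == 5
assert fields[4] == 'utmcsr=example.com|utmccn=(referral)|utmcmd=referral'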
Example No. 26
  def testGetTimeOfDay(self):
    """Tests the GetTimeOfDay function."""
    semantic_time_object = semantic_time.SemanticTime()

    time_of_day_tuple = semantic_time_object.GetTimeOfDay()
    self.assertEqual(time_of_day_tuple, (None, None, None))
Example No. 27
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        version_value = registry_key.GetValueByName('Version')
        count_subkey = registry_key.GetSubkeyByName('Count')

        if not version_value:
            parser_mediator.ProduceExtractionError('missing version value')
            return

        if not version_value.DataIsInteger():
            parser_mediator.ProduceExtractionError(
                'unsupported version value data type')
            return

        format_version = version_value.GetDataAsObject()
        if format_version not in (3, 5):
            parser_mediator.ProduceExtractionError(
                'unsupported format version: {0:d}'.format(format_version))
            return

        if not count_subkey:
            parser_mediator.ProduceExtractionError('missing count subkey')
            return

        userassist_entry_index = 0

        for registry_value in count_subkey.GetValues():
            try:
                # The rot-13 str-to-str codec is applied via codecs.decode()
                # (assumes the module imports codecs).
                value_name = codecs.decode(registry_value.name, 'rot-13')
            except UnicodeEncodeError as exception:
                logging.debug((
                    'Unable to decode UserAssist string: {0:s} with error: {1!s}.\n'
                    'Attempting piecewise decoding.').format(
                        registry_value.name, exception))

                characters = []
                for char in registry_value.name:
                    if ord(char) < 128:
                        try:
                            characters.append(codecs.decode(char, 'rot-13'))
                        except UnicodeEncodeError:
                            characters.append(char)
                    else:
                        characters.append(char)

                value_name = ''.join(characters)

            if format_version == 5:
                path_segments = value_name.split('\\')

                for segment_index in range(len(path_segments)):
                    # Remove the { } from the path segment to get the GUID.
                    guid = path_segments[segment_index][1:-1]
                    path_segments[segment_index] = known_folder_ids.PATHS.get(
                        guid, path_segments[segment_index])

                value_name = '\\'.join(path_segments)
                # Check if we might need to substitute values.
                if '%' in value_name:
                    # TODO: fix missing self._knowledge_base
                    # pylint: disable=no-member
                    environment_variables = (
                        self._knowledge_base.GetEnvironmentVariables())
                    value_name = path_helper.PathHelper.ExpandWindowsPath(
                        value_name, environment_variables)

            value_data_size = len(registry_value.data)
            if not registry_value.DataIsBinaryData():
                parser_mediator.ProduceExtractionError(
                    'unsupported value data type: {0:s}'.format(
                        registry_value.data_type_string))

            elif value_name == 'UEME_CTLSESSION':
                pass

            elif format_version == 3:
                if value_data_size != self._USERASSIST_V3_STRUCT.sizeof():
                    parser_mediator.ProduceExtractionError(
                        'unsupported value data size: {0:d}'.format(
                            value_data_size))

                else:
                    parsed_data = self._USERASSIST_V3_STRUCT.parse(
                        registry_value.data)
                    timestamp = parsed_data.get('timestamp', None)

                    number_of_executions = parsed_data.get(
                        'number_of_executions', None)
                    # The stored count appears to start at 5 on these Windows
                    # versions, so normalize values above 5.
                    if number_of_executions is not None and number_of_executions > 5:
                        number_of_executions -= 5

                    event_data = UserAssistWindowsRegistryEventData()
                    event_data.key_path = count_subkey.path
                    event_data.number_of_executions = number_of_executions
                    event_data.offset = registry_value.offset
                    event_data.value_name = value_name

                    if not timestamp:
                        date_time = dfdatetime_semantic_time.SemanticTime(
                            'Not set')
                    else:
                        date_time = dfdatetime_filetime.Filetime(
                            timestamp=timestamp)

                    # TODO: check if last written is correct.
                    event = time_events.DateTimeValuesEvent(
                        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            elif format_version == 5:
                if value_data_size != self._USERASSIST_V5_STRUCT.sizeof():
                    parser_mediator.ProduceExtractionError(
                        'unsupported value data size: {0:d}'.format(
                            value_data_size))

                parsed_data = self._USERASSIST_V5_STRUCT.parse(
                    registry_value.data)

                userassist_entry_index += 1
                timestamp = parsed_data.get('timestamp', None)

                event_data = UserAssistWindowsRegistryEventData()
                event_data.application_focus_count = parsed_data.get(
                    'application_focus_count', None)
                event_data.application_focus_duration = parsed_data.get(
                    'application_focus_duration', None)
                event_data.entry_index = userassist_entry_index
                event_data.key_path = count_subkey.path
                event_data.number_of_executions = parsed_data.get(
                    'number_of_executions', None)
                event_data.offset = count_subkey.offset
                event_data.value_name = value_name

                if not timestamp:
                    date_time = dfdatetime_semantic_time.SemanticTime(
                        'Not set')
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=timestamp)

                # TODO: check if last written is correct.
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_WRITTEN)
                parser_mediator.ProduceEventWithEventData(event, event_data)
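UserAssist value names are ROT-13 encoded, which only shifts ASCII letters and leaves digits and punctuation untouched; the codecs module decodes them directly. For example:

import codecs

value_name = codecs.decode('HRZR_EHACNGU:P:\\Jvaqbjf\\abgrcnq.rkr', 'rot-13')
assert value_name == 'UEME_RUNPATH:C:\\Windows\\notepad.exe'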
Example No. 28
  def testGetPlasoTimestamp(self):
    """Tests the GetPlasoTimestamp function."""
    semantic_time_object = semantic_time.SemanticTime()

    micro_posix_timestamp = semantic_time_object.GetPlasoTimestamp()
    self.assertEqual(micro_posix_timestamp, 0)
Example No. 29
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.

    Raises:
      ParseError: if the value data could not be parsed.
    """
        value = registry_key.GetValueByName('AppCompatCache')
        if not value:
            return

        value_data = value.data
        value_data_size = len(value.data)

        format_type = self._CheckSignature(value_data)
        if not format_type:
            parser_mediator.ProduceExtractionWarning(
                'Unsupported signature in AppCompatCache key: {0:s}'.format(
                    registry_key.path))
            return

        header_object = self._ParseHeader(format_type, value_data)

        # On Windows Vista and 2008 when the cache is empty it will
        # only consist of the header.
        if value_data_size <= header_object.header_size:
            return

        cached_entry_offset = header_object.header_size

        self._cached_entry_data_type_map = self._GetCachedEntryDataTypeMap(
            format_type, value_data, cached_entry_offset)
        if not self._cached_entry_data_type_map:
            raise errors.ParseError(
                'Unable to determine cached entry data type.')

        parse_cached_entry_function = None
        if format_type == self._FORMAT_TYPE_XP:
            parse_cached_entry_function = self._ParseCachedEntryXP
        elif format_type == self._FORMAT_TYPE_2003:
            parse_cached_entry_function = self._ParseCachedEntry2003
        elif format_type == self._FORMAT_TYPE_VISTA:
            parse_cached_entry_function = self._ParseCachedEntryVista
        elif format_type == self._FORMAT_TYPE_7:
            parse_cached_entry_function = self._ParseCachedEntry7
        elif format_type == self._FORMAT_TYPE_8:
            parse_cached_entry_function = self._ParseCachedEntry8
        elif format_type == self._FORMAT_TYPE_10:
            parse_cached_entry_function = self._ParseCachedEntry10

        cached_entry_index = 0
        while cached_entry_offset < value_data_size:
            cached_entry_object = parse_cached_entry_function(
                value_data, cached_entry_offset)

            event_data = AppCompatCacheEventData()
            event_data.entry_index = cached_entry_index + 1
            event_data.key_path = registry_key.path.replace('\\', '/')
            event_data.offset = cached_entry_offset
            event_data.path = cached_entry_object.path

            if cached_entry_object.last_modification_time is not None:
                if not cached_entry_object.last_modification_time:
                    date_time = dfdatetime_semantic_time.SemanticTime(
                        'Not set')
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=cached_entry_object.last_modification_time)

                # TODO: refactor to file modification event.
                event = time_events.DateTimeValuesEvent(
                    date_time, 'File Last Modification Time')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            if cached_entry_object.last_update_time is not None:
                if not cached_entry_object.last_update_time:
                    date_time = dfdatetime_semantic_time.SemanticTime(
                        'Not set')
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=cached_entry_object.last_update_time)

                # TODO: refactor to process run event.
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            cached_entry_offset += cached_entry_object.cached_entry_size
            cached_entry_index += 1

            if (header_object.number_of_cached_entries != 0 and
                    cached_entry_index >= header_object.number_of_cached_entries):
                break
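The _CheckSignature call above maps the first bytes of the value data to a format type. A simplified sketch of such a dispatch using commonly documented AppCompatCache header signatures (the constants are assumptions, not the plugin's own table, and the Windows 8/10 variants are omitted):

import struct

# Commonly documented AppCompatCache signatures (assumed values).
_HEADER_SIGNATURES = {
    0xdeadbeef: 'windows_xp',
    0xbadc0ffe: 'windows_2003',
    0xbadc0fee: 'windows_vista_7',
}

def check_signature(value_data):
  """Returns a format label for a known signature, or None."""
  if len(value_data) < 4:
    return None
  signature = struct.unpack('<I', value_data[:4])[0]
  return _HEADER_SIGNATURES.get(signature)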
Example No. 30
  def testCopyFromDateTimeString(self):
    """Tests the CopyFromDateTimeString function."""
    semantic_time_object = semantic_time.SemanticTime()

    semantic_time_object.CopyFromDateTimeString('Never')
    self.assertEqual(semantic_time_object.string, 'Never')