Example 1
    def _ParseFiletime(self, byte_stream):
        """Parses a FILETIME date and time value from a byte stream.

        Args:
          byte_stream (bytes): byte stream.

        Returns:
          dfdatetime.DateTimeValues: a FILETIME date and time value or a
            semantic date and time value if the FILETIME is not set.

        Raises:
          ParseError: if the FILETIME could not be parsed.
        """
        filetime_map = self._GetDataTypeMap('filetime')

        try:
            filetime = self._ReadStructureFromByteStream(
                byte_stream, 0, filetime_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                'Unable to parse FILETIME value with error: {0!s}'.format(
                    exception))

        if filetime == 0:
            return dfdatetime_semantic_time.NotSet()

        try:
            return dfdatetime_filetime.Filetime(timestamp=filetime)
        except ValueError:
            raise errors.ParseError(
                'Invalid FILETIME value: 0x{0:08x}'.format(filetime))
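The guard in this example — return a semantic NotSet value when the raw FILETIME is zero, otherwise wrap it in a dfdatetime Filetime — is the pattern most of the examples below share. Here is a minimal standalone sketch of that fallback, assuming only the public dfdatetime package; the plaso structure parsing and error handling are omitted, and filetime_to_date_time is a hypothetical helper name.

from dfdatetime import filetime as dfdatetime_filetime
from dfdatetime import semantic_time as dfdatetime_semantic_time


def filetime_to_date_time(filetime):
  """Maps a raw FILETIME integer to a dfdatetime value.

  A FILETIME of 0 means the timestamp was never set, so a semantic
  NotSet value is returned instead of 1601-01-01 00:00:00.
  """
  if not filetime:
    return dfdatetime_semantic_time.NotSet()
  return dfdatetime_filetime.Filetime(timestamp=filetime)


# 2021-01-01 00:00:00 UTC as a FILETIME (100ns intervals since 1601-01-01).
print(filetime_to_date_time(132539328000000000).CopyToDateTimeString())
print(filetime_to_date_time(0).CopyToDateTimeString())  # prints: Not set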
Example 2
  def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
    """Extracts relevant Airport entries.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      match (Optional[dict[str, object]]): keys extracted from PLIST_KEYS.
    """
    if 'RememberedNetworks' not in match:
      return

    for wifi in match['RememberedNetworks']:
      ssid = wifi.get('SSIDString', 'UNKNOWN_SSID')
      security_type = wifi.get('SecurityType', 'UNKNOWN_SECURITY_TYPE')

      event_data = plist_event.PlistTimeEventData()
      event_data.desc = (
          '[WiFi] Connected to network: <{0:s}> using security {1:s}').format(
              ssid, security_type)
      event_data.key = 'item'
      event_data.root = '/RememberedNetworks'

      datetime_value = wifi.get('LastConnected', None)
      if datetime_value:
        date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
        date_time.CopyFromDatetime(datetime_value)
      else:
        date_time = dfdatetime_semantic_time.NotSet()

      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
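Example 2 converts the Python datetime object returned by the plist library with dfdatetime's TimeElementsInMicroseconds.CopyFromDatetime(). A short sketch of just that conversion, with a hard-coded datetime standing in for a plist 'LastConnected' value:

import datetime

from dfdatetime import time_elements as dfdatetime_time_elements

# Stand-in for a value such as wifi.get('LastConnected').
datetime_value = datetime.datetime(2020, 5, 4, 13, 30, 15, 250000)

date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
date_time.CopyFromDatetime(datetime_value)
print(date_time.CopyToDateTimeString())  # 2020-05-04 13:30:15.250000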
Example 3
  def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs):
    """Extracts launchd information from the plist.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
      top_level (Optional[dict[str, object]]): plist top-level item.
    """

    label = top_level.get('Label')
    command = top_level.get('Program', '')
    program_arguments = top_level.get('ProgramArguments') or []
    for argument in program_arguments:
      command += ' {0:s}'.format(argument)

    user_name = top_level.get('UserName')
    group_name = top_level.get('GroupName')

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = ('Launchd service config {0:s} points to {1:s} with '
                       'user:{2:s} group:{3:s}').format(label, command,
                                                        user_name, group_name)
    event_data.key = 'launchdServiceConfig'
    event_data.root = '/'

    date_time = dfdatetime_semantic_time.NotSet()
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)

    parser_mediator.ProduceEventWithEventData(event, event_data)
Example 4
    def Process(self, parser_mediator, root_item=None, **kwargs):
        """Extracts events from an OLECF file.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          root_item (Optional[pyolecf.item]): root item of the OLECF file.

        Raises:
          ValueError: if the root item is not set.
        """
        # This will raise if unhandled keyword arguments are passed.
        super(DefaultOLECFPlugin, self).Process(parser_mediator, **kwargs)

        if not root_item:
            raise ValueError('Root item not set.')

        if not self._ParseItem(parser_mediator, root_item):
            event_data = OLECFItemEventData()
            event_data.name = root_item.name
            event_data.size = root_item.size

            # If no event was produced, produce at least one for the root item.
            date_time = dfdatetime_semantic_time.NotSet()
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example 5
  def ParseNameSpace(
      self, parser_mediator, cache=None, database=None, table=None,
      **unused_kwargs):
    """Parses the namespace table.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache.
      database (Optional[ESEDatabase]): ESE database.
      table (Optional[pyesedb.table]): table.

    Raises:
      ValueError: if the database or table value is missing.
    """
    if database is None:
      raise ValueError('Missing database value.')

    if table is None:
      raise ValueError('Missing table value.')

    strings = cache.GetResults('strings')
    if not strings:
      esedb_table = database.GetTableByName('string')
      strings = self._GetDictFromStringsTable(parser_mediator, esedb_table)
      cache.StoreDictInCache('strings', strings)

    for record_index, esedb_record in enumerate(table.records):
      if parser_mediator.abort:
        break

      record_values = self._GetRecordValues(
          parser_mediator, table.name, record_index, esedb_record)

      event_data = FileHistoryNamespaceEventData()
      event_data.file_attribute = record_values.get('fileAttrib', None)
      event_data.identifier = record_values.get('id', None)
      event_data.parent_identifier = record_values.get('parentId', None)
      event_data.usn_number = record_values.get('usn', None)
      event_data.original_filename = strings.get(event_data.identifier, None)

      created_timestamp = record_values.get('fileCreated')
      if created_timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=created_timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      modified_timestamp = record_values.get('fileModified')
      if modified_timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=modified_timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      if not created_timestamp and not modified_timestamp:
        date_time = dfdatetime_semantic_time.NotSet()
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 6
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a ShutdownTime Windows Registry value.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
        """
        shutdown_value = registry_key.GetValueByName('ShutdownTime')
        if not shutdown_value:
            return

        try:
            date_time = self._ParseFiletime(shutdown_value.data)
        except errors.ParseError as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to determine shutdown timestamp with error: {0!s}'.
                format(exception))
            return

        if not date_time:
            date_time = dfdatetime_semantic_time.NotSet()

        event_data = ShutdownWindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = shutdown_value.offset
        event_data.value_name = shutdown_value.name

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_SHUTDOWN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 7
    def _ParseLeak(self,
                   parser_mediator,
                   cache_directories,
                   msiecf_item,
                   recovered=False):
        """Extract data from a MSIE Cache Files (MSIECF) leak item.

        Every item is stored as an event object, one for each timestamp.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          cache_directories (list[str]): cache directory names.
          msiecf_item (pymsiecf.leak): MSIECF leak item.
          recovered (Optional[bool]): True if the item was recovered.
        """
        # TODO: add support for possible last cache synchronization date and time.
        date_time = dfdatetime_semantic_time.NotSet()

        event_data = MSIECFLeakEventData()
        event_data.cached_filename = msiecf_item.filename
        event_data.cached_file_size = msiecf_item.cached_file_size
        event_data.cache_directory_index = msiecf_item.cache_directory_index
        event_data.offset = msiecf_item.offset
        event_data.recovered = recovered

        if (event_data.cache_directory_index >= 0
                and event_data.cache_directory_index < len(cache_directories)):
            event_data.cache_directory_name = (
                cache_directories[event_data.cache_directory_index])

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 8
  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    # TODO: Test other Office versions to make sure this plugin is applicable.
    entries = []
    for registry_value in registry_key.GetValues():
      # Ignore any value not in the form: 'Item [0-9]+'.
      if not registry_value.name or not self._RE_VALUE_NAME.search(
          registry_value.name):
        continue

      # Ignore any value that is empty or that does not contain a string.
      if not registry_value.data or not registry_value.DataIsString():
        continue

      value_string = registry_value.GetDataAsObject()
      values = self._RE_VALUE_DATA.findall(value_string)

      # The result should be a list containing a single tuple of two values.
      if len(values) != 1 or len(values[0]) != 2:
        continue

      try:
        timestamp = int(values[0][0], 16)
      except ValueError:
        parser_mediator.ProduceExtractionWarning((
            'unable to convert filetime string to an integer for '
            'value: {0:s}.').format(registry_value.name))
        continue

      event_data = OfficeMRUWindowsRegistryEventData()
      event_data.key_path = registry_key.path
      # TODO: split value string in individual values.
      event_data.value_string = value_string

      entries.append('{0:s}: {1:s}'.format(registry_value.name, value_string))

      if not timestamp:
        date_time = dfdatetime_semantic_time.NotSet()
      else:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

      # TODO: determine if this should be last written time.
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    event_data = OfficeMRUListWindowsRegistryEventData()
    event_data.entries = ' '.join(entries) or None
    event_data.key_path = registry_key.path

    event = time_events.DateTimeValuesEvent(
        registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example 9
    def _ParseRecord(self,
                     parser_mediator,
                     record_index,
                     evtx_record,
                     recovered=False):
        """Extract data from a Windows XML EventLog (EVTX) record.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          record_index (int): event record index.
          evtx_record (pyevtx.record): event record.
          recovered (Optional[bool]): True if the record was recovered.
        """
        event_data = self._GetEventDataFromRecord(parser_mediator,
                                                  record_index,
                                                  evtx_record,
                                                  recovered=recovered)

        try:
            written_time = evtx_record.get_written_time_as_integer()
        except OverflowError as exception:
            warning_message = (
                'unable to read written time from event record: {0:d} '
                'with error: {1!s}').format(record_index, exception)
            if recovered:
                parser_mediator.ProduceRecoveryWarning(warning_message)
            else:
                parser_mediator.ProduceExtractionWarning(warning_message)

            written_time = None

        if written_time is None:
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_filetime.Filetime(timestamp=written_time)

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        try:
            creation_time = evtx_record.get_creation_time_as_integer()
        except OverflowError as exception:
            warning_message = (
                'unable to read creation time from event record: {0:d} '
                'with error: {1!s}').format(record_index, exception)
            if recovered:
                parser_mediator.ProduceRecoveryWarning(warning_message)
            else:
                parser_mediator.ProduceExtractionWarning(warning_message)

            creation_time = None

        if creation_time:
            date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example 10
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Windows Restore Point (rp.log) log file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        file_size = file_object.get_size()

        file_header_map = self._GetDataTypeMap('rp_log_file_header')

        try:
            file_header, _ = self._ReadStructureFromFileObject(
                file_object, 0, file_header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile(
                'Unable to parse file header with error: {0!s}'.format(
                    exception))

        file_footer_map = self._GetDataTypeMap('rp_log_file_footer')

        file_footer_offset = file_size - file_footer_map.GetByteSize()

        try:
            file_footer, _ = self._ReadStructureFromFileObject(
                file_object, file_footer_offset, file_footer_map)
        except (ValueError, errors.ParseError) as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to parse file footer with error: {0!s}'.format(
                    exception))
            return

        # The description in the file header includes the end-of-string character
        # that we need to strip off.
        description = file_header.description.rstrip('\0')

        if file_footer.creation_time == 0:
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=file_footer.creation_time)

        event_data = RestorePointEventData()
        event_data.description = description
        event_data.restore_point_event_type = file_header.event_type
        event_data.restore_point_type = file_header.restore_point_type
        event_data.sequence_number = file_header.sequence_number

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 11
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Windows Recycle.Bin metadata ($I) file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        # We may have to rely on filenames since this header is very generic.

        # TODO: Rethink this and potentially make a better test.
        filename = parser_mediator.GetFilename()
        if not filename.startswith('$I'):
            raise errors.UnableToParseFile('Filename must start with $I.')

        file_header_map = self._GetDataTypeMap(
            'recycle_bin_metadata_file_header')

        try:
            file_header, _ = self._ReadStructureFromFileObject(
                file_object, 0, file_header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile((
                'Unable to parse Windows Recycle.Bin metadata file header with '
                'error: {0!s}').format(exception))

        if file_header.format_version not in self._SUPPORTED_FORMAT_VERSIONS:
            raise errors.UnableToParseFile(
                'Unsupported format version: {0:d}.'.format(
                    file_header.format_version))

        if file_header.deletion_time == 0:
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=file_header.deletion_time)

        event_data = WinRecycleBinEventData()
        try:
            event_data.original_filename = self._ParseOriginalFilename(
                file_object, file_header.format_version)
        except (ValueError, errors.ParseError) as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to parse original filename with error: {0!s}.'.format(
                    exception))

        event_data.file_size = file_header.original_file_size

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_DELETED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 12
    def _ParseRecord(self,
                     parser_mediator,
                     record_index,
                     evtx_record,
                     recovered=False):
        """Extract data from a Windows XML EventLog (EVTX) record.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          record_index (int): event record index.
          evtx_record (pyevtx.record): event record.
          recovered (Optional[bool]): True if the record was recovered.
        """
        event_data = self._GetEventDataFromRecord(parser_mediator,
                                                  record_index,
                                                  evtx_record,
                                                  recovered=recovered)

        try:
            written_time = evtx_record.get_written_time_as_integer()
        except OverflowError as exception:
            parser_mediator.ProduceExtractionWarning(
                ('unable to read written time from event record: {0:d} '
                 'with error: {1!s}').format(record_index, exception))

            written_time = None

        if written_time is None:
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_filetime.Filetime(timestamp=written_time)

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        creation_time_string = self._GetCreationTimeFromXMLString(
            parser_mediator, record_index, event_data.xml_string)
        if creation_time_string:
            date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()

            try:
                date_time.CopyFromStringISO8601(creation_time_string)
            except ValueError as exception:
                parser_mediator.ProduceExtractionWarning(
                    'unsupported creation time: {0:s} with error: {1!s}.'.
                    format(creation_time_string, exception))
                date_time = None

            if date_time:
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_CREATION)
                parser_mediator.ProduceEventWithEventData(event, event_data)
Example 13
    def _ParseMetadataItem(self, parser_mediator, metadata_item):
        """Parses an Apple Spotlight store metadata item.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          metadata_item (SpotlightStoreMetadataItem): a metadata item.
        """
        event_data = SpotlightStoreMetadataItemEventData()

        # TODO: for identifier 1 extract and process kMDStoreProperties plist

        # Identifier 1 is used for volume metadata.
        if metadata_item.identifier > 1:
            event_data.file_system_identifier = metadata_item.identifier
            event_data.parent_file_system_identifier = metadata_item.parent_identifier

        for metadata_attribute_name, attribute_name in (
                self._EVENT_DATA_METADATA_ATTRIBUTES):
            metadata_attribute = metadata_item.attributes.get(
                metadata_attribute_name, None)
            if metadata_attribute:
                setattr(event_data, attribute_name, metadata_attribute.value)

        for metadata_attribute_name, timestamp_description in (
                self._DATE_TIME_METADATA_ATTRIBUTES):
            metadata_attribute = metadata_item.attributes.get(
                metadata_attribute_name, None)
            if metadata_attribute and metadata_attribute.value:
                if isinstance(metadata_attribute.value,
                              collections.abc.Sequence):
                    timestamps = metadata_attribute.value
                else:
                    timestamps = [metadata_attribute.value]

                for timestamp in timestamps:
                    date_time = dfdatetime_cocoa_time.CocoaTime(
                        timestamp=timestamp)

                    event = time_events.DateTimeValuesEvent(
                        date_time, timestamp_description)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

        if metadata_item.last_update_time == 0:
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
                timestamp=metadata_item.last_update_time)

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_UPDATE)
        parser_mediator.ProduceEventWithEventData(event, event_data)
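Example 13 mixes two epochs: Cocoa timestamps (seconds since 2001-01-01) for the metadata attributes and POSIX microseconds (since 1970-01-01) for the store's last update time. A small sketch contrasting the two wrappers; the literal timestamps are illustrative:

from dfdatetime import cocoa_time as dfdatetime_cocoa_time
from dfdatetime import posix_time as dfdatetime_posix_time

# 631152000 seconds after 2001-01-01 00:00:00 UTC is 2021-01-01.
print(dfdatetime_cocoa_time.CocoaTime(
    timestamp=631152000).CopyToDateTimeString())

# The same moment as POSIX microseconds since 1970-01-01.
print(dfdatetime_posix_time.PosixTimeInMicroseconds(
    timestamp=1609459200000000).CopyToDateTimeString())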
Example 14
    def _GetDateTime(self, filetime):
        """Retrieves the date and time from a FILETIME timestamp.

        Args:
          filetime (int): FILETIME timestamp.

        Returns:
          dfdatetime.DateTimeValues: date and time.
        """
        if filetime == 0:
            return dfdatetime_semantic_time.NotSet()

        return dfdatetime_filetime.Filetime(timestamp=filetime)
Example 15
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Portable Executable (PE) file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfVFS.
          file_object (dfvfs.FileIO): a file-like object.

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        pe_data_slice = dfvfs_data_slice.DataSlice(file_object)
        try:
            pefile_object = pefile.PE(data=pe_data_slice, fast_load=True)
            pefile_object.parse_data_directories(
                directories=self._PE_DIRECTORIES)
        except Exception as exception:
            raise errors.UnableToParseFile(
                'Unable to read PE file with error: {0!s}'.format(exception))

        event_data = PEEventData()
        # Note that the result of get_imphash() is an empty string if there is no
        # import hash.
        event_data.imphash = pefile_object.get_imphash() or None
        event_data.pe_type = self._GetPEType(pefile_object)
        event_data.section_names = self._GetSectionNames(pefile_object)

        timestamp = getattr(pefile_object.FILE_HEADER, 'TimeDateStamp', None)
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
        else:
            date_time = dfdatetime_semantic_time.NotSet()

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        self._ParseExportTable(parser_mediator, pefile_object, event_data)

        self._ParseImportTable(parser_mediator, pefile_object, event_data)

        self._ParseLoadConfigurationTable(parser_mediator, pefile_object,
                                          event_data)

        self._ParseDelayImportTable(parser_mediator, pefile_object, event_data)

        self._ParseResourceSection(parser_mediator, pefile_object, event_data)
Example 16
    def _ParseCookieData(self,
                         parser_mediator,
                         cookie_data=None,
                         url=None,
                         **kwargs):
        """Extracts events from cookie data.

        Args:
          parser_mediator (ParserMediator): parser mediator.
          cookie_data (str): cookie data.
          url (str): URL or path where the cookie got set.
        """
        fields = cookie_data.split('.')
        number_of_fields = len(fields)

        if number_of_fields != 1:
            parser_mediator.ProduceExtractionWarning(
                'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
                    number_of_fields, self.COOKIE_NAME))
            return

        try:
            # TODO: fix that we're losing precision here use dfdatetime.
            last_visit_posix_time = int(fields[0], 10) / 10000000
        except ValueError:
            last_visit_posix_time = None

        if last_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=last_visit_posix_time)
            timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
        else:
            date_time = dfdatetime_semantic_time.NotSet()
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

        event_data = GoogleAnalyticsEventData('utmt')
        event_data.cookie_name = self.COOKIE_NAME
        event_data.url = url

        event = time_events.DateTimeValuesEvent(date_time,
                                                timestamp_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)
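The TODO in Example 16 notes that dividing by 10000000 floors the value to whole seconds. By the code's own logic the raw field counts 100-nanosecond intervals since 1970-01-01, so one possible fix (a sketch, not plaso's implementation) is to keep microsecond precision with PosixTimeInMicroseconds:

from dfdatetime import posix_time as dfdatetime_posix_time

raw_value = 16094592001234567  # hypothetical cookie field, 100ns intervals
date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
    timestamp=raw_value // 10)
print(date_time.CopyToDateTimeString())  # 2021-01-01 00:00:00.123456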
Example 17
    def _ParseRedirected(self, parser_mediator, msiecf_item, recovered=False):
        """Extract data from a MSIE Cache Files (MSIECF) redirected item.

        Every item is stored as an event object, one for each timestamp.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          msiecf_item (pymsiecf.redirected): MSIECF redirected item.
          recovered (Optional[bool]): True if the item was recovered.
        """
        date_time = dfdatetime_semantic_time.NotSet()

        event_data = MSIECFRedirectedEventData()
        event_data.offset = msiecf_item.offset
        event_data.recovered = recovered
        event_data.url = msiecf_item.location

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 18
  def ParseConversationRow(self, parser_mediator, query, row, **unused_kwargs):
    """Parses a conversation row from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row resulting from query.
    """
    query_hash = hash(query)

    event_data = TangoAndroidConversationEventData()
    event_data.conversation_identifier = self._GetRowValue(
        query_hash, row, 'conv_id')

    # TODO: payload is a base64 encoded binary blob, we need to find the
    # structure to extract the relevant bits.
    # event_data.payload = self._GetRowValue(query_hash, row, 'payload')

    date_time = dfdatetime_semantic_time.NotSet()
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example 19
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          registry_key (dfwinreg.WinRegistryKey): Windows Registry key.

        Raises:
          ParseError: if the value data could not be parsed.
        """
        value = registry_key.GetValueByName('AppCompatCache')
        if not value:
            return

        value_data = value.data
        value_data_size = len(value.data)

        format_type = self._CheckSignature(value_data)
        if not format_type:
            parser_mediator.ProduceExtractionWarning(
                'Unsupported signature in AppCompatCache key: {0:s}'.format(
                    registry_key.path))
            return

        header_object = self._ParseHeader(format_type, value_data)

        # On Windows Vista and 2008 when the cache is empty it will
        # only consist of the header.
        if value_data_size <= header_object.header_size:
            return

        cached_entry_offset = header_object.header_size

        self._cached_entry_data_type_map = self._GetCachedEntryDataTypeMap(
            format_type, value_data, cached_entry_offset)
        if not self._cached_entry_data_type_map:
            raise errors.ParseError(
                'Unable to determine cached entry data type.')

        parse_cached_entry_function = None
        if format_type == self._FORMAT_TYPE_XP:
            parse_cached_entry_function = self._ParseCachedEntryXP
        elif format_type == self._FORMAT_TYPE_2003:
            parse_cached_entry_function = self._ParseCachedEntry2003
        elif format_type == self._FORMAT_TYPE_VISTA:
            parse_cached_entry_function = self._ParseCachedEntryVista
        elif format_type == self._FORMAT_TYPE_7:
            parse_cached_entry_function = self._ParseCachedEntry7
        elif format_type == self._FORMAT_TYPE_8:
            parse_cached_entry_function = self._ParseCachedEntry8
        elif format_type == self._FORMAT_TYPE_10:
            parse_cached_entry_function = self._ParseCachedEntry10

        cached_entry_index = 0
        while cached_entry_offset < value_data_size:
            cached_entry_object = parse_cached_entry_function(
                value_data, cached_entry_offset)

            event_data = AppCompatCacheEventData()
            event_data.entry_index = cached_entry_index + 1
            event_data.key_path = registry_key.path
            event_data.offset = cached_entry_offset
            event_data.path = cached_entry_object.path

            if cached_entry_object.last_modification_time is not None:
                if not cached_entry_object.last_modification_time:
                    date_time = dfdatetime_semantic_time.NotSet()
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=cached_entry_object.last_modification_time)

                # TODO: refactor to file modification event.
                event = time_events.DateTimeValuesEvent(
                    date_time, 'File Last Modification Time')
                parser_mediator.ProduceEventWithEventData(event, event_data)

            if cached_entry_object.last_update_time is not None:
                if not cached_entry_object.last_update_time:
                    date_time = dfdatetime_semantic_time.NotSet()
                else:
                    date_time = dfdatetime_filetime.Filetime(
                        timestamp=cached_entry_object.last_update_time)

                # TODO: refactor to process run event.
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            cached_entry_offset += cached_entry_object.cached_entry_size
            cached_entry_index += 1

            if (header_object.number_of_cached_entries != 0
                    and cached_entry_index >=
                    header_object.number_of_cached_entries):
                break
Example 20
    def _ParseUrl(self,
                  parser_mediator,
                  format_version,
                  cache_directories,
                  msiecf_item,
                  recovered=False):
        """Extract data from a MSIE Cache Files (MSIECF) URL item.

        Every item is stored as an event object, one for each timestamp.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          format_version (str): MSIECF format version.
          cache_directories (list[str]): cache directory names.
          msiecf_item (pymsiecf.url): MSIECF URL item.
          recovered (Optional[bool]): True if the item was recovered.
        """
        # The secondary time can be stored in either UTC or local time; this
        # depends on what the index.dat file is used for. Either the file path
        # or the location string can be used to distinguish between the
        # different types of files.
        timestamp = msiecf_item.get_primary_time_as_integer()
        if not timestamp:
            primary_date_time = dfdatetime_semantic_time.NotSet()
        else:
            primary_date_time = dfdatetime_filetime.Filetime(
                timestamp=timestamp)
        primary_date_time_description = 'Primary Time'

        timestamp = msiecf_item.get_secondary_time_as_integer()
        secondary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        secondary_date_time_description = 'Secondary Time'

        if msiecf_item.type:
            if msiecf_item.type == 'cache':
                primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_MODIFICATION)

            elif msiecf_item.type == 'cookie':
                primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_MODIFICATION)

            elif msiecf_item.type == 'history':
                primary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)

            elif msiecf_item.type == 'history-daily':
                primary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                # The secondary_date_time is in local time; normalize it
                # to UTC.
                secondary_date_time.is_local_time = True

            elif msiecf_item.type == 'history-weekly':
                primary_date_time_description = definitions.TIME_DESCRIPTION_CREATION
                secondary_date_time_description = (
                    definitions.TIME_DESCRIPTION_LAST_VISITED)
                # The secondary_date_time is in local time; normalize it
                # to UTC.
                secondary_date_time.is_local_time = True

        http_headers = ''
        if msiecf_item.type and msiecf_item.data:
            if msiecf_item.type == 'cache':
                if msiecf_item.data[:4] == b'HTTP':
                    # Make sure the HTTP headers are ASCII encoded.
                    # TODO: determine correct encoding currently indications that
                    # this could be the system narrow string codepage.
                    try:
                        http_headers = msiecf_item.data[:-1].decode('ascii')
                    except UnicodeDecodeError:
                        parser_mediator.ProduceExtractionWarning((
                            'unable to decode HTTP headers of URL record at offset: '
                            '0x{0:08x}. Characters that cannot be decoded will be '
                            'replaced with "?" or "\\ufffd".').format(
                                msiecf_item.offset))
                        http_headers = msiecf_item.data[:-1].decode(
                            'ascii', errors='replace')

            # TODO: parse data of other URL item type like history which requires
            # OLE VT parsing.

        event_data = MSIECFURLEventData()
        event_data.cached_filename = msiecf_item.filename
        event_data.cached_file_size = msiecf_item.cached_file_size
        event_data.cache_directory_index = msiecf_item.cache_directory_index
        event_data.http_headers = http_headers
        event_data.number_of_hits = msiecf_item.number_of_hits
        event_data.offset = msiecf_item.offset
        event_data.recovered = recovered
        event_data.url = msiecf_item.location

        if (event_data.cache_directory_index >= 0
                and event_data.cache_directory_index < len(cache_directories)):
            event_data.cache_directory_name = (
                cache_directories[event_data.cache_directory_index])

        event = time_events.DateTimeValuesEvent(primary_date_time,
                                                primary_date_time_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        if secondary_date_time.timestamp != 0:
            event = time_events.DateTimeValuesEvent(
                secondary_date_time,
                secondary_date_time_description,
                time_zone=parser_mediator.timezone)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        expiration_timestamp = msiecf_item.get_expiration_time_as_integer()
        if expiration_timestamp != 0:
            # The expiration time in MSIECF version 4.7 is stored as a FILETIME
            # value; in version 5.2 it is stored as a FAT date time value. Since
            # the as_integer function returns the raw integer value, we need to
            # apply the right conversion here.
            if format_version == '4.7':
                if expiration_timestamp == 0x7fffffffffffffff:
                    expiration_date_time = dfdatetime_semantic_time.Never()
                else:
                    expiration_date_time = dfdatetime_filetime.Filetime(
                        timestamp=expiration_timestamp)
            else:
                if expiration_timestamp == 0xffffffff:
                    expiration_date_time = dfdatetime_semantic_time.Never()
                else:
                    expiration_date_time = dfdatetime_fat_date_time.FATDateTime(
                        fat_date_time=expiration_timestamp)

            event = time_events.DateTimeValuesEvent(
                expiration_date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        last_checked_timestamp = msiecf_item.get_last_checked_time_as_integer()
        if last_checked_timestamp != 0:
            last_checked_date_time = dfdatetime_fat_date_time.FATDateTime(
                fat_date_time=last_checked_timestamp)

            event = time_events.DateTimeValuesEvent(
                last_checked_date_time,
                definitions.TIME_DESCRIPTION_LAST_CHECKED)
            parser_mediator.ProduceEventWithEventData(event, event_data)
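Example 20 has to pick the wrapper by format version because the same expiration field is a FILETIME in MSIECF 4.7 but a 32-bit FAT date time in 5.2. A sketch of the two interpretations side by side; the literal values are illustrative and both decode to 2021-01-01 00:00:00:

from dfdatetime import fat_date_time as dfdatetime_fat_date_time
from dfdatetime import filetime as dfdatetime_filetime

# FAT date time packs the date into the lower 16 bits: year since 1980,
# month and day; the upper 16 bits hold hours, minutes and 2-second steps.
fat_value = (41 << 9) | (1 << 5) | 1  # 2021-01-01, time bits all zero
print(dfdatetime_fat_date_time.FATDateTime(
    fat_date_time=fat_value).CopyToDateTimeString())

# FILETIME counts 100ns intervals since 1601-01-01.
print(dfdatetime_filetime.Filetime(
    timestamp=132539328000000000).CopyToDateTimeString())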
Example 21
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses an ASL file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        file_header_map = self._GetDataTypeMap('asl_file_header')

        try:
            file_header, _ = self._ReadStructureFromFileObject(
                file_object, 0, file_header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.UnableToParseFile(
                'Unable to parse file header with error: {0!s}'.format(
                    exception))

        is_dirty = False
        file_size = file_object.get_size()

        if file_header.first_log_entry_offset > 0:
            last_log_entry_offset = 0
            file_offset = file_header.first_log_entry_offset

            while file_offset < file_size:
                last_log_entry_offset = file_offset

                try:
                    file_offset = self._ParseRecord(parser_mediator,
                                                    file_object, file_offset)
                except errors.ParseError as exception:
                    parser_mediator.ProduceExtractionWarning(
                        'unable to parse record with error: {0!s}'.format(
                            exception))
                    return

                if file_offset == 0:
                    break

            if last_log_entry_offset != file_header.last_log_entry_offset:
                is_dirty = True
                parser_mediator.ProduceRecoveryWarning(
                    'last log entry offset does not match value in file '
                    'header.')

        event_data = ASLFileEventData()
        event_data.format_version = file_header.format_version
        event_data.is_dirty = is_dirty

        if file_header.creation_time:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=file_header.creation_time)
        else:
            date_time = dfdatetime_semantic_time.NotSet()

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 22
    def _ParseCookieData(self,
                         parser_mediator,
                         cookie_data=None,
                         url=None,
                         **kwargs):
        """Extracts events from cookie data.

        Args:
          parser_mediator (ParserMediator): parser mediator.
          cookie_data (str): cookie data.
          url (str): URL or path where the cookie got set.
        """
        fields = cookie_data.split('.')
        number_of_fields = len(fields)

        if number_of_fields not in (1, 6):
            parser_mediator.ProduceExtractionWarning(
                'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
                    number_of_fields, self.COOKIE_NAME))
            return

        if number_of_fields == 1:
            domain_hash = None
            visitor_identifier = None
            first_visit_posix_time = None
            previous_visit_posix_time = None

            try:
                # TODO: fix that we're losing precision here use dfdatetime.
                last_visit_posix_time = int(fields[0], 10) / 10000000
            except ValueError:
                last_visit_posix_time = None

            number_of_sessions = None

        elif number_of_fields == 6:
            domain_hash = fields[0]
            visitor_identifier = fields[1]

            # TODO: Double check this time is stored in UTC and not local time.
            try:
                first_visit_posix_time = int(fields[2], 10)
            except ValueError:
                first_visit_posix_time = None

            try:
                previous_visit_posix_time = int(fields[3], 10)
            except ValueError:
                previous_visit_posix_time = None

            try:
                last_visit_posix_time = int(fields[4], 10)
            except ValueError:
                last_visit_posix_time = None

            try:
                number_of_sessions = int(fields[5], 10)
            except ValueError:
                number_of_sessions = None

        event_data = GoogleAnalyticsEventData('utma')
        event_data.cookie_name = self.COOKIE_NAME
        event_data.domain_hash = domain_hash
        event_data.sessions = number_of_sessions
        event_data.url = url
        event_data.visitor_id = visitor_identifier

        if first_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=first_visit_posix_time)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Analytics Creation Time')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if previous_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=previous_visit_posix_time)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Analytics Previous Time')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        date_time = None
        if last_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=last_visit_posix_time)
            timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
        elif first_visit_posix_time is None and previous_visit_posix_time is None:
            # If neither the first visit nor the previous visit time is set,
            # produce an event object without a timestamp.
            date_time = dfdatetime_semantic_time.NotSet()
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

        if date_time is not None:
            event = time_events.DateTimeValuesEvent(date_time,
                                                    timestamp_description)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example 23
    def _ParseCookieData(self,
                         parser_mediator,
                         cookie_data=None,
                         url=None,
                         **kwargs):
        """Extracts events from cookie data.

        Args:
          parser_mediator (ParserMediator): parser mediator.
          cookie_data (str): cookie data.
          url (str): URL or path where the cookie got set.
        """
        fields = cookie_data.split('.')
        number_of_fields = len(fields)

        if number_of_fields > 5:
            variables = '.'.join(fields[4:])
            fields = fields[0:4]
            fields.append(variables)
            number_of_fields = len(fields)

        if number_of_fields not in (1, 5):
            parser_mediator.ProduceExtractionWarning(
                'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
                    number_of_fields, self.COOKIE_NAME))
            return

        if number_of_fields == 1:
            domain_hash = None

            try:
                # TODO: fix that we're losing precision here use dfdatetime.
                last_visit_posix_time = int(fields[0], 10) / 10000000
            except ValueError:
                last_visit_posix_time = None

            number_of_sessions = None
            number_of_sources = None
            extra_attributes = {}

        elif number_of_fields == 5:
            domain_hash = fields[0]

            try:
                last_visit_posix_time = int(fields[1], 10)
            except ValueError:
                last_visit_posix_time = None

            try:
                number_of_sessions = int(fields[2], 10)
            except ValueError:
                number_of_sessions = None

            try:
                number_of_sources = int(fields[3], 10)
            except ValueError:
                number_of_sources = None

            extra_variables = fields[4].split('|')

            extra_attributes = {}
            for variable in extra_variables:
                key, _, value = variable.partition('=')
                extra_attributes[key] = urlparse.unquote(value)

        if last_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=last_visit_posix_time)
            timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
        else:
            date_time = dfdatetime_semantic_time.NotSet()
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

        event_data = GoogleAnalyticsEventData('utmz')
        event_data.cookie_name = self.COOKIE_NAME
        event_data.domain_hash = domain_hash
        event_data.sessions = number_of_sessions
        event_data.sources = number_of_sources
        event_data.url = url

        for key, value in extra_attributes.items():
            setattr(event_data, key, value)

        event = time_events.DateTimeValuesEvent(date_time,
                                                timestamp_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 24
    def _ParseInfo2Record(self, parser_mediator, file_object, record_offset,
                          record_size):
        """Parses an INFO-2 record.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.
          record_offset (int): record offset.
          record_size (int): record size.

        Raises:
          ParseError: if the record cannot be read.
        """
        record_data = self._ReadData(file_object, record_offset, record_size)

        record_map = self._GetDataTypeMap('recycler_info2_file_entry')

        try:
            record = self._ReadStructureFromByteStream(record_data,
                                                       record_offset,
                                                       record_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError(
                ('Unable to map record data at offset: 0x{0:08x} with error: '
                 '{1!s}').format(record_offset, exception))

        codepage = parser_mediator.codepage or 'ascii'

        # The original filename can contain remnant data after the end-of-string
        # character.
        ascii_filename = record.original_filename.split(b'\x00')[0]

        try:
            ascii_filename = ascii_filename.decode(codepage)
        except UnicodeDecodeError:
            ascii_filename = ascii_filename.decode(codepage, errors='replace')

            parser_mediator.ProduceExtractionWarning(
                'unable to decode original filename.')

        unicode_filename = None
        if record_size > 280:
            record_offset += 280
            utf16_string_map = self._GetDataTypeMap(
                'recycler_info2_file_entry_utf16le_string')

            try:
                unicode_filename = self._ReadStructureFromByteStream(
                    record_data[280:], record_offset, utf16_string_map)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError((
                    'Unable to map record data at offset: 0x{0:08x} with error: '
                    '{1!s}').format(record_offset, exception))

        if record.deletion_time == 0:
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=record.deletion_time)

        event_data = WinRecycleBinEventData()
        event_data.drive_number = record.drive_number
        event_data.original_filename = unicode_filename or ascii_filename
        event_data.file_size = record.original_file_size
        event_data.offset = record_offset
        event_data.record_index = record.index

        if ascii_filename != unicode_filename:
            event_data.short_filename = ascii_filename

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_DELETED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example 25
    def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal):
        """Parses an USN change journal.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      usn_change_journal (pyfsntsfs.usn_change_journal): USN change journal.

    Raises:
      ParseError: if an USN change journal record cannot be parsed.
    """
        if not usn_change_journal:
            return

        usn_record_map = self._GetDataTypeMap('usn_record_v2')

        usn_record_data = usn_change_journal.read_usn_record()
        while usn_record_data:
            current_offset = usn_change_journal.get_offset()

            try:
                usn_record = self._ReadStructureFromByteStream(
                    usn_record_data, current_offset, usn_record_map)
            except (ValueError, errors.ParseError) as exception:
                raise errors.ParseError((
                    'Unable to parse USN record at offset: 0x{0:08x} with error: '
                    '{1!s}').format(current_offset, exception))

            # Per MSDN we need to use name offset for forward compatibility.
            name_offset = usn_record.name_offset - 60
            utf16_stream = usn_record.name[name_offset:usn_record.name_size]

            try:
                name_string = utf16_stream.decode('utf-16-le')
            except (UnicodeDecodeError, UnicodeEncodeError) as exception:
                name_string = utf16_stream.decode('utf-16-le',
                                                  errors='replace')
                parser_mediator.ProduceExtractionWarning((
                    'unable to decode USN record name string with error: '
                    '{0!s}. Characters that cannot be decoded will be replaced '
                    'with "?" or "\\ufffd".').format(exception))

            event_data = NTFSUSNChangeEventData()
            event_data.file_attribute_flags = usn_record.file_attribute_flags
            event_data.file_reference = usn_record.file_reference
            event_data.filename = name_string
            event_data.offset = current_offset
            event_data.parent_file_reference = usn_record.parent_file_reference
            event_data.update_reason_flags = usn_record.update_reason_flags
            event_data.update_sequence_number = usn_record.update_sequence_number
            event_data.update_source_flags = usn_record.update_source_flags

            if not usn_record.update_date_time:
                date_time = dfdatetime_semantic_time.NotSet()
            else:
                date_time = dfdatetime_filetime.Filetime(
                    timestamp=usn_record.update_date_time)

            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

            usn_record_data = usn_change_journal.read_usn_record()
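A minimal sketch of how this method would be driven, assuming a raw NTFS volume image and the pyfsntfs calls used in the snippet above (the image path is hypothetical):

import pyfsntfs

fsntfs_volume = pyfsntfs.volume()
fsntfs_volume.open('ntfs.raw')  # hypothetical raw NTFS image

# get_usn_change_journal() returns None when the volume has no $UsnJrnl,
# which _ParseUSNChangeJournal handles by returning early.
usn_change_journal = fsntfs_volume.get_usn_change_journal()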
Example No. 26
  def ParseDestList(self, parser_mediator, olecf_item):
    """Parses the DestList OLECF item.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      olecf_item (pyolecf.item): OLECF item.

    Raises:
      UnableToParseFile: if the DestList cannot be parsed.
    """
    if olecf_item.size == 0:
      parser_mediator.ProduceExtractionWarning('empty DestList stream')
      return

    header_map = self._GetDataTypeMap('dest_list_header')

    try:
      header, entry_offset = self._ReadStructureFromFileObject(
          olecf_item, 0, header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse DestList header with error: {0!s}'.format(
              exception))

    if header.format_version == 1:
      entry_map = self._GetDataTypeMap('dest_list_entry_v1')
    elif header.format_version in (3, 4):
      entry_map = self._GetDataTypeMap('dest_list_entry_v3')
    else:
      parser_mediator.ProduceExtractionWarning(
          'unsupported format version: {0:d}.'.format(header.format_version))
      return

    while entry_offset < olecf_item.size:
      try:
        entry, entry_data_size = self._ReadStructureFromFileObject(
            olecf_item, entry_offset, entry_map)
      except (ValueError, errors.ParseError) as exception:
        raise errors.UnableToParseFile(
            'Unable to parse DestList entry with error: {0!s}'.format(
                exception))

      display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)

      try:
        droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.droid_volume_identifier, display_name)

      except (TypeError, ValueError) as exception:
        droid_volume_identifier = ''
        parser_mediator.ProduceExtractionWarning(
            'unable to read droid volume identifier with error: {0!s}'.format(
                exception))

      try:
        droid_file_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.droid_file_identifier, display_name)

      except (TypeError, ValueError) as exception:
        droid_file_identifier = ''
        parser_mediator.ProduceExtractionWarning(
            'unable to read droid file identifier with error: {0!s}'.format(
                exception))

      try:
        birth_droid_volume_identifier = (
            self._ParseDistributedTrackingIdentifier(
                parser_mediator, entry.birth_droid_volume_identifier,
                display_name))

      except (TypeError, ValueError) as exception:
        birth_droid_volume_identifier = ''
        parser_mediator.ProduceExtractionWarning((
            'unable to read birth droid volume identifier with error: '
            '{0!s}').format(exception))

      try:
        birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.birth_droid_file_identifier, display_name)

      except (TypeError, ValueError) as exception:
        birth_droid_file_identifier = ''
        parser_mediator.ProduceExtractionWarning((
            'unable to read birth droid file identifier with error: '
            '{0!s}').format(exception))

      if entry.last_modification_time == 0:
        date_time = dfdatetime_semantic_time.NotSet()
      else:
        date_time = dfdatetime_filetime.Filetime(
            timestamp=entry.last_modification_time)

      event_data = AutomaticDestinationsDestListEntryEventData()
      event_data.birth_droid_file_identifier = birth_droid_file_identifier
      event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
      event_data.droid_file_identifier = droid_file_identifier
      event_data.droid_volume_identifier = droid_volume_identifier
      event_data.entry_number = entry.entry_number
      event_data.hostname = entry.hostname.rstrip('\x00')
      event_data.offset = entry_offset
      event_data.path = entry.path.rstrip('\x00')
      event_data.pin_status = entry.pin_status

      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      entry_offset += entry_data_size
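The droid identifiers handled above are 16-byte Distributed Link Tracking GUIDs. A rough sketch of decoding one with the standard library, using hypothetical raw bytes (plaso's _ParseDistributedTrackingIdentifier is assumed to do something similar internally):

import uuid

raw_identifier = bytes(16)  # hypothetical 16-byte droid value

# The on-disk value is little-endian, hence bytes_le.
droid = uuid.UUID(bytes_le=raw_identifier)
print(str(droid))

# For version 1 (time-based) UUIDs the node field carries a MAC address.
if droid.version == 1:
    print('{0:012x}'.format(droid.node))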
Example No. 27
    def _ParseSCCAFile(self, parser_mediator, scca_file):
        """Parses a Windows Prefetch (SCCA) file.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      scca_file (pyscca.file): Windows Prefetch (SCCA) file.

    Raises:
      IOError: if the Windows Prefetch (SCCA) file cannot be parsed.
    """
        format_version = scca_file.format_version
        executable_filename = scca_file.executable_filename
        prefetch_hash = scca_file.prefetch_hash
        run_count = scca_file.run_count
        number_of_volumes = scca_file.number_of_volumes

        volume_serial_numbers = []
        volume_device_paths = []
        path_hints = []

        for volume_information in iter(scca_file.volumes):
            volume_serial_number = volume_information.serial_number
            volume_device_path = volume_information.device_path

            volume_serial_numbers.append(volume_serial_number)
            volume_device_paths.append(volume_device_path)

            timestamp = volume_information.get_creation_time_as_integer()
            if timestamp:
                event_data = windows_events.WindowsVolumeEventData()
                event_data.device_path = volume_device_path
                event_data.origin = parser_mediator.GetFilename()
                event_data.serial_number = volume_serial_number

                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_CREATION)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            for filename in iter(scca_file.filenames):
                if not filename:
                    continue

                if (filename.startswith(volume_device_path)
                        and filename.endswith(executable_filename)):
                    _, _, path = filename.partition(volume_device_path)
                    path_hints.append(path)

        mapped_files = []
        for entry_index, file_metrics in enumerate(
                scca_file.file_metrics_entries):
            mapped_file_string = file_metrics.filename
            if not mapped_file_string:
                parser_mediator.ProduceExtractionWarning(
                    'missing filename for file metrics entry: {0:d}'.format(
                        entry_index))
                continue

            file_reference = file_metrics.file_reference
            if file_reference:
                mapped_file_string = ('{0:s} [{1:d}-{2:d}]').format(
                    mapped_file_string, file_reference & 0xffffffffffff,
                    file_reference >> 48)

            mapped_files.append(mapped_file_string)

        event_data = WinPrefetchExecutionEventData()
        event_data.executable = executable_filename
        event_data.mapped_files = mapped_files
        event_data.number_of_volumes = number_of_volumes
        event_data.path_hints = path_hints
        event_data.prefetch_hash = prefetch_hash
        event_data.run_count = run_count
        event_data.version = format_version
        event_data.volume_device_paths = volume_device_paths
        event_data.volume_serial_numbers = volume_serial_numbers

        timestamp = scca_file.get_last_run_time_as_integer(0)
        if not timestamp:
            parser_mediator.ProduceExtractionWarning('missing last run time')
            date_time = dfdatetime_semantic_time.NotSet()
        else:
            date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # Check for the 7 older last run time values available since
        # format version 26.
        if format_version >= 26:
            for last_run_time_index in range(1, 8):
                timestamp = scca_file.get_last_run_time_as_integer(
                    last_run_time_index)
                if not timestamp:
                    continue

                date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
                date_time_description = 'Previous {0:s}'.format(
                    definitions.TIME_DESCRIPTION_LAST_RUN)
                event = time_events.DateTimeValuesEvent(
                    date_time, date_time_description)
                parser_mediator.ProduceEventWithEventData(event, event_data)
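A short sketch of exercising the same pyscca properties directly, assuming a Prefetch file on disk (the filename is hypothetical):

import pyscca

scca_file = pyscca.file()
scca_file.open('CMD.EXE-089B8C97.pf')  # hypothetical Prefetch file

print(scca_file.executable_filename, scca_file.run_count)
# The most recent last run time as a FILETIME integer.
print(scca_file.get_last_run_time_as_integer(0))

scca_file.close()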
Example No. 28
  def testInitialize(self):
    """Tests the __init__ function."""
    not_set_time_object = semantic_time.NotSet()
    self.assertEqual(not_set_time_object.string, 'Not set')
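The value under test is a placeholder rather than a real timestamp; the parsers in the surrounding examples substitute it whenever a stored timestamp is zero or missing. A quick interactive check, assuming the dfdatetime package layout used above:

from dfdatetime import semantic_time

not_set_time_object = semantic_time.NotSet()
print(not_set_time_object.string)  # prints: Not set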
Example No. 29
    def _ParseRecord(self, parser_mediator, page_data, record_offset):
        """Parses a record from the page data.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      page_data (bytes): page data.
      record_offset (int): offset of the record relative to the start
          of the page.

    Raises:
      ParseError: when the record cannot be parsed.
    """
        record_header_map = self._GetDataTypeMap('binarycookies_record_header')

        try:
            record_header = self._ReadStructureFromByteStream(
                page_data[record_offset:], record_offset, record_header_map)
        except (ValueError, errors.ParseError) as exception:
            raise errors.ParseError((
                'Unable to map record header data at offset: 0x{0:08x} with error: '
                '{1!s}').format(record_offset, exception))

        event_data = SafariBinaryCookieEventData()
        event_data.flags = record_header.flags

        if record_header.url_offset:
            data_offset = record_offset + record_header.url_offset
            event_data.url = self._ParseCString(page_data, data_offset)

        if record_header.name_offset:
            data_offset = record_offset + record_header.name_offset
            event_data.cookie_name = self._ParseCString(page_data, data_offset)

        if record_header.path_offset:
            data_offset = record_offset + record_header.path_offset
            event_data.path = self._ParseCString(page_data, data_offset)

        if record_header.value_offset:
            data_offset = record_offset + record_header.value_offset
            event_data.cookie_value = self._ParseCString(
                page_data, data_offset)

        if record_header.creation_time:
            date_time = dfdatetime_cocoa_time.CocoaTime(
                timestamp=record_header.creation_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if record_header.expiration_time:
            date_time = dfdatetime_cocoa_time.CocoaTime(
                timestamp=record_header.expiration_time)
        else:
            date_time = dfdatetime_semantic_time.NotSet()

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        for plugin in self._cookie_plugins:
            if parser_mediator.abort:
                break

            if event_data.cookie_name != plugin.COOKIE_NAME:
                continue

            try:
                plugin.UpdateChainAndProcess(
                    parser_mediator,
                    cookie_name=event_data.cookie_name,
                    cookie_data=event_data.cookie_value,
                    url=event_data.url)

            except Exception as exception:  # pylint: disable=broad-except
                parser_mediator.ProduceExtractionWarning(
                    'plugin: {0:s} unable to parse cookie with error: '
                    '{1!s}'.format(plugin.NAME, exception))
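The creation and expiration values above are Cocoa timestamps: seconds since 2001-01-01 00:00:00 UTC. A minimal standalone conversion, independent of dfdatetime and using a hypothetical input:

from datetime import datetime, timedelta, timezone

COCOA_EPOCH = datetime(2001, 1, 1, tzinfo=timezone.utc)

def cocoa_time_to_datetime(cocoa_time):
    """Converts a Cocoa timestamp (seconds since 2001-01-01) to datetime."""
    return COCOA_EPOCH + timedelta(seconds=cocoa_time)

print(cocoa_time_to_datetime(600000000.0))  # hypothetical value, early 2020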
Example No. 30
    def ParseRow(self, parser_mediator, row_offset, row):
        """Parses a line of the log file and produces events.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row_offset (int): number of the corresponding line.
      row (dict[str, str]): fields of a single row, as specified in COLUMNS.
    """
        filename = row.get('name', None)
        md5_hash = row.get('md5', None)
        mode = row.get('mode_as_string', None)

        inode_number = row.get('inode', None)
        if inode_number and '-' in inode_number:
            inode_number, _, _ = inode_number.partition('-')

        try:
            inode_number = int(inode_number, 10)
        except (TypeError, ValueError):
            inode_number = None

        data_size = self._GetIntegerValue(row, 'size')
        user_uid = self._GetIntegerValue(row, 'uid')
        user_gid = self._GetIntegerValue(row, 'gid')

        symbolic_link_target = ''
        if mode and filename and mode[0] == 'l' and ' -> ' in filename:
            filename, _, symbolic_link_target = filename.rpartition(' -> ')

        event_data = MactimeEventData()
        event_data.filename = filename
        event_data.inode = inode_number
        event_data.md5 = md5_hash
        event_data.mode_as_string = mode
        event_data.offset = row_offset
        event_data.size = data_size
        event_data.symbolic_link_target = symbolic_link_target
        event_data.user_gid = user_gid

        if user_uid is None:
            event_data.user_sid = None
        else:
            # Note that the user_sid value is expected to be a string.
            event_data.user_sid = '{0:d}'.format(user_uid)

        for value_name, timestamp_description in (
                self._TIMESTAMP_DESC_MAP.items()):
            posix_time = self._GetFloatingPointValue(row, value_name)

            # mactime will return 0 if the timestamp is not set.
            if not posix_time:
                posix_time = self._GetIntegerValue(row, value_name)

            if not posix_time:
                continue

            # The check above already skips unset (zero) timestamps, so only
            # the float versus integer distinction remains.
            if isinstance(posix_time, float):
                posix_time = int(
                    posix_time * definitions.NANOSECONDS_PER_SECOND)
                date_time = dfdatetime_posix_time.PosixTimeInNanoseconds(
                    timestamp=posix_time)

            else:
                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=posix_time)

            event = time_events.DateTimeValuesEvent(
                date_time, timestamp_description)
            parser_mediator.ProduceEventWithEventData(event, event_data)
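The float branch above preserves sub-second precision by scaling to nanoseconds before building the dfdatetime object. A worked example with a hypothetical fractional mactime value:

from dfdatetime import posix_time as dfdatetime_posix_time

posix_time = 1567890123.5  # hypothetical fractional mactime value
timestamp = int(posix_time * 1000000000)  # NANOSECONDS_PER_SECOND
date_time = dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)
print(date_time.CopyToDateTimeString())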