Exemple #1
0
  def Process(
      self, parser_mediator, cache=None, database=None, **unused_kwargs):
    """Extracts events from a SQLite database.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cache (Optional[SQLiteCache]): cache.
      database (Optional[SQLiteDatabase]): database.

    Raises:
      ValueError: If the database or cache value are missing.
    """
    if cache is None:
      raise ValueError('Missing cache value.')
    if database is None:
      raise ValueError('Missing database value.')

    # Process() of the base class raises when unhandled keyword arguments
    # are passed.
    super(SQLitePlugin, self).Process(parser_mediator)

    for query, callback_method in self.QUERIES:
      if parser_mediator.abort:
        break

      callback_function = getattr(self, callback_method, None)
      if callback_function:
        self._ParseSQLiteDatabase(
            parser_mediator, database, query, callback_function, cache)
      else:
        logger.warning(
            '[{0:s}] missing callback method: {1:s} for query: {2:s}'.format(
                self.NAME, callback_method, query))
Exemple #2
0
    def Close(self):
        """Closes the database connection and removes the temporary files."""
        self.schema = {}

        if self._is_open:
            self._database.close()
        self._database = None

        # Both the temporary copy of the database file and of its
        # write-ahead log (WAL) need to be removed.
        for temp_file_path in (
                self._temp_db_file_path, self._temp_wal_file_path):
            if os.path.exists(temp_file_path):
                try:
                    os.remove(temp_file_path)
                except (OSError, IOError) as exception:
                    logger.warning((
                        'Unable to remove temporary copy: {0:s} of SQLite '
                        'database: {1:s} with error: {2!s}').format(
                            temp_file_path, self._filename, exception))

        self._temp_db_file_path = ''
        self._temp_wal_file_path = ''

        self._is_open = False
Exemple #3
0
  def ParseRecord(self, parser_mediator, key, structure):
    """Parses a log record structure and produces events.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): identifier of the structure of tokens.
      structure (pyparsing.ParseResults): structure of tokens derived from
          a line of a text file.

    Raises:
      ParseError: when the structure type is unknown.
    """
    if key == 'logline':
      self._ParseLogLine(parser_mediator, structure)

    elif key == 'header':
      self._ParseHeader(parser_mediator, structure)

    elif key == 'header_signature':
      # A match on this key (after the other keys failed) means a different
      # localized header was found and parsing should stop until a new valid
      # header is seen. Parsing is stopped by setting xchat_year to 0.
      # Note that the code assumes that LINE_STRUCTURES will be used in the
      # exact order as defined!
      logger.warning('Unknown locale header.')
      self._xchat_year = 0

    else:
      raise errors.ParseError(
          'Unable to parse record, unknown structure: {0:s}'.format(key))
Exemple #4
0
    def VerifyFile(self, parser_mediator, file_object):
        """Check if the file is a BSM file.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      bool: True if this is a valid BSM file, False otherwise.
    """
        # A BSM file always starts with a header token.
        try:
            token_type = self._BSM_TOKEN.parse_stream(file_object)
        except (IOError, construct.FieldError):
            return False

        if token_type not in self._BSM_HEADER_TOKEN_TYPES:
            return False

        _, record_structure = self._BSM_TOKEN_TYPES.get(token_type, ('', None))

        try:
            header = record_structure.parse_stream(file_object)
        except (IOError, construct.FieldError):
            return False

        if header.bsm_header.version != self.AUDIT_HEADER_VERSION:
            return False

        try:
            token_identifier = self._BSM_TOKEN.parse_stream(file_object)
        except (IOError, construct.FieldError):
            return False

        if parser_mediator.operating_system != (
                definitions.OPERATING_SYSTEM_MACOS):
            return True

        # In a MacOS BSM file the next entry is a text token indicating
        # whether this is a normal start or a recovery track.
        token_type, record_structure = self._BSM_TOKEN_TYPES.get(
            token_identifier, ('', None))

        if not record_structure:
            return False

        if token_type != 'BSM_TOKEN_TEXT':
            logger.warning('It is not a valid first entry for MacOS BSM.')
            return False

        try:
            token = record_structure.parse_stream(file_object)
        except (IOError, construct.FieldError):
            return False

        text = self._CopyUtf8ByteArrayToString(token.text)
        if text not in ('launchctl::Audit startup', 'launchctl::Audit recovery'):
            logger.warning('It is not a valid first entry for MacOS BSM.')
            return False

        return True
Exemple #5
0
    def _ParseMRUListValue(self, registry_key):
        """Parses the MRUList value in a given Registry key.

    Args:
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key that contains
           the MRUList value.

    Returns:
      generator: MRUList value generator, which returns the MRU index number
          and entry value.
    """
        mru_list_value = registry_key.GetValueByName('MRUList')
        if not mru_list_value:
            # The key exists but does not contain a value named "MRUList".
            return enumerate([])

        try:
            return enumerate(self._MRULIST_STRUCT.parse(mru_list_value.data))
        except construct.FieldError:
            logger.warning('[{0:s}] Unable to parse the MRU key: {1:s}'.format(
                self.NAME, registry_key.path))
            return enumerate([])
  def _GetLatestYearFromFileEntry(self):
    """Retrieves the maximum (highest value) year from the file entry.

    This function uses the modification time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None.
    """
    file_entry = self.GetFileEntry()
    if not file_entry:
      return None

    stat_object = file_entry.GetStat()

    # Prefer the modification time and fall back to the change time.
    posix_time = getattr(stat_object, 'mtime', None)
    if posix_time is None:
      posix_time = getattr(stat_object, 'ctime', None)
    if posix_time is None:
      logger.warning(
          'Unable to determine modification year from file stat information.')
      return None

    try:
      return timelib.GetYearFromPosixTime(
          posix_time, timezone=self._knowledge_base.timezone)
    except ValueError as exception:
      logger.error((
          'Unable to determine creation year from file stat '
          'information with error: {0!s}').format(exception))
      return None
Exemple #7
0
    def _GetEarliestYearFromFileEntry(self):
        """Retrieves the year from the file entry date and time values.

    This function uses the creation time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None.
    """
        file_entry = self.GetFileEntry()
        if not file_entry:
            return None

        # Prefer the creation time and fall back to the change time.
        date_time = file_entry.creation_time or file_entry.change_time

        # Gzip files do not store a creation or change time, but their
        # modification time is a good alternative.
        if file_entry.TYPE_INDICATOR == dfvfs_definitions.TYPE_INDICATOR_GZIP:
            date_time = file_entry.modification_time

        if date_time is None:
            logger.warning('File entry has no creation or change time.')
            return None

        return date_time.GetDate()[0]
Exemple #8
0
  def _RemoveTemporaryFile(self, file_path):
    """Removes a temporary file copy of the SQLite database, if it exists.

    Args:
      file_path (str): path of the temporary file.
    """
    if not os.path.exists(file_path):
      return

    try:
      os.remove(file_path)
    except (OSError, IOError) as exception:
      logger.warning((
          'Unable to remove temporary copy: {0:s} of SQLite database: '
          '{1:s} with error: {2!s}').format(
              file_path, self._filename, exception))

  def Close(self):
    """Closes the database connection and cleans up the temporary files."""
    self.schema = {}

    if self._is_open:
      self._database.close()
    self._database = None

    # Both the temporary copy of the database file and of its write-ahead
    # log (WAL) need to be removed; the removal logic was duplicated and is
    # now shared via _RemoveTemporaryFile.
    self._RemoveTemporaryFile(self._temp_db_file_path)
    self._temp_db_file_path = ''

    self._RemoveTemporaryFile(self._temp_wal_file_path)
    self._temp_wal_file_path = ''

    self._is_open = False
    def ParseRecord(self, parser_mediator, key, structure):
        """Parses a log record structure and produces events.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): identifier of the structure of tokens.
      structure (pyparsing.ParseResults): structure of tokens derived from
          a line of a text file.

    Raises:
      ParseError: when the structure type is unknown.
    """
        if key == 'logline':
            self._ParseLogLine(parser_mediator, structure)

        elif key == 'header':
            self._ParseHeader(parser_mediator, structure)

        elif key == 'header_signature':
            # A match on this key (after the other keys failed) means a
            # different localized header was found and parsing should stop
            # until a new valid header is seen. Parsing is stopped by setting
            # xchat_year to 0.
            # Note that the code assumes that LINE_STRUCTURES will be used in
            # the exact order as defined!
            logger.warning('Unknown locale header.')
            self._xchat_year = 0

        else:
            raise errors.ParseError(
                'Unable to parse record, unknown structure: {0:s}'.format(key))
Exemple #10
0
  def _GetLatestYearFromFileEntry(self):
    """Retrieves the maximum (highest value) year from the file entry.

    This function uses the modification time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None.
    """
    file_entry = self.GetFileEntry()
    if not file_entry:
      return None

    stat_object = file_entry.GetStat()

    # Try the modification time first, then the change time.
    posix_time = None
    for attribute_name in ('mtime', 'ctime'):
      posix_time = getattr(stat_object, attribute_name, None)
      if posix_time is not None:
        break

    if posix_time is None:
      logger.warning(
          'Unable to determine modification year from file stat information.')
      return None

    try:
      return timelib.GetYearFromPosixTime(
          posix_time, timezone=self._knowledge_base.timezone)
    except ValueError as exception:
      logger.error((
          'Unable to determine creation year from file stat '
          'information with error: {0!s}').format(exception))
      return None
Exemple #11
0
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Windows Registry file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
    """
        # Collection filters are optional; when absent the whole file is
        # parsed recursively.
        registry_find_specs = getattr(
            parser_mediator.collection_filters_helper, 'registry_find_specs',
            None)

        registry_file = dfwinreg_regf.REGFWinRegistryFile(
            ascii_codepage=parser_mediator.codepage,
            emulate_virtual_keys=False)

        try:
            registry_file.Open(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to open Windows Registry file with error: {0!s}'.
                format(exception))
            return

        try:
            win_registry = dfwinreg_registry.WinRegistry()

            key_path_prefix = win_registry.GetRegistryFileMapping(
                registry_file)
            registry_file.SetKeyPathPrefix(key_path_prefix)
            root_key = registry_file.GetRootKey()
            if root_key:
                # For now treat AMCache.hve separately.
                if root_key.name.lower() in self._AMCACHE_ROOT_KEY_NAMES:
                    self._ParseRecurseKeys(parser_mediator, root_key)

                elif not registry_find_specs:
                    self._ParseRecurseKeys(parser_mediator, root_key)

                elif not self._ARTIFACTS_FILTER_HELPER.CheckKeyCompatibility(
                        key_path_prefix):
                    # Filters that cannot handle this key path prefix are
                    # reported; note that in this branch nothing is parsed.
                    logger.warning((
                        'Artifacts filters are not supported for Windows Registry '
                        'file with key path prefix: "{0:s}".'
                    ).format(key_path_prefix))

                else:
                    win_registry.MapFile(key_path_prefix, registry_file)
                    # Note that win_registry will close the mapped registry_file.
                    registry_file = None

                    self._ParseKeysFromFindSpecs(parser_mediator, win_registry,
                                                 registry_find_specs)

        except IOError as exception:
            parser_mediator.ProduceExtractionWarning('{0!s}'.format(exception))

        finally:
            # Only close the file here when its ownership was not transferred
            # to win_registry via MapFile above.
            if registry_file:
                registry_file.Close()
Exemple #12
0
    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows Registry file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
    """
        win_registry_reader = FileObjectWinRegistryFileReader()

        try:
            registry_file = win_registry_reader.Open(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionError(
                'unable to open Windows Registry file with error: {0!s}'.
                format(exception))
            return

        win_registry = dfwinreg_registry.WinRegistry()

        # Determine the Windows Registry key path prefix for this file.
        key_path_prefix = win_registry.GetRegistryFileMapping(registry_file)
        registry_file.SetKeyPathPrefix(key_path_prefix)
        root_key = registry_file.GetRootKey()
        if not root_key:
            return

        # Registry key find specifications, if any, are retrieved from the
        # knowledge base under the filter helper's well-known value name.
        find_specs = parser_mediator.knowledge_base.GetValue(
            artifact_filters.ArtifactDefinitionsFilterHelper.
            KNOWLEDGE_BASE_VALUE)

        registry_find_specs = None
        if find_specs:
            registry_find_specs = find_specs.get(
                artifact_types.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY)

        key_path_compatible = (artifact_filters.ArtifactDefinitionsFilterHelper
                               .CheckKeyCompatibility(key_path_prefix))

        if registry_find_specs and key_path_compatible:
            try:
                # NOTE(review): MapFile appears to hand registry_file over to
                # win_registry; confirm win_registry is responsible for
                # closing it.
                win_registry.MapFile(key_path_prefix, registry_file)
                self._ParseKeysFromFindSpecs(parser_mediator, win_registry,
                                             registry_find_specs)
            except IOError as exception:
                parser_mediator.ProduceExtractionError(
                    '{0!s}'.format(exception))

        else:
            # Fall back to parsing the entire file when there are no filters
            # or when the filters do not support this key path prefix.
            if registry_find_specs and not key_path_compatible:
                logger.warning(
                    ('Artifacts Registry Filters are not supported for '
                     'the registry prefix {0:s}. Parsing entire file.'
                     ).format(key_path_prefix))

            try:
                self._ParseRecurseKeys(parser_mediator, root_key)
            except IOError as exception:
                parser_mediator.ProduceExtractionError(
                    '{0!s}'.format(exception))
Exemple #13
0
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Windows Registry file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
    """
        # TODO: set codepage from mediator.
        registry_file = dfwinreg_regf.REGFWinRegistryFile(
            ascii_codepage='cp1252', emulate_virtual_keys=False)

        try:
            registry_file.Open(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to open Windows Registry file with error: {0!s}'.
                format(exception))
            return

        try:
            win_registry = dfwinreg_registry.WinRegistry()

            key_path_prefix = win_registry.GetRegistryFileMapping(
                registry_file)
            registry_file.SetKeyPathPrefix(key_path_prefix)
            root_key = registry_file.GetRootKey()
            if root_key:
                # Collection filters are optional; when absent the whole file
                # is parsed recursively.
                registry_find_specs = getattr(
                    parser_mediator.collection_filters_helper,
                    'registry_find_specs', None)

                if not registry_find_specs:
                    self._ParseRecurseKeys(parser_mediator, root_key)
                else:
                    artifacts_filters_helper = (
                        artifact_filters.ArtifactDefinitionsFiltersHelper)
                    if not artifacts_filters_helper.CheckKeyCompatibility(
                            key_path_prefix):
                        # Note that in this branch nothing is parsed.
                        logger.warning((
                            'Artifacts filters are not supported for Windows Registry file '
                            'with key path prefix: "{0:s}".'
                        ).format(key_path_prefix))
                    else:
                        win_registry.MapFile(key_path_prefix, registry_file)
                        # Note that win_registry will close the mapped registry_file.
                        registry_file = None

                        self._ParseKeysFromFindSpecs(parser_mediator,
                                                     win_registry,
                                                     registry_find_specs)

        except IOError as exception:
            parser_mediator.ProduceExtractionWarning('{0!s}'.format(exception))

        finally:
            # Only close the file here when its ownership was not transferred
            # to win_registry via MapFile above.
            if registry_file:
                registry_file.Close()
Exemple #14
0
  def ParseFileObject(self, parser_mediator, file_object):
    """Parses a Windows Registry file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
    """
    win_registry_reader = FileObjectWinRegistryFileReader()

    try:
      registry_file = win_registry_reader.Open(file_object)
    except IOError as exception:
      parser_mediator.ProduceExtractionError(
          'unable to open Windows Registry file with error: {0!s}'.format(
              exception))
      return

    win_registry = dfwinreg_registry.WinRegistry()

    # Determine the Windows Registry key path prefix for this file.
    key_path_prefix = win_registry.GetRegistryFileMapping(registry_file)
    registry_file.SetKeyPathPrefix(key_path_prefix)
    root_key = registry_file.GetRootKey()
    if not root_key:
      return

    # Registry key find specifications, if any, are retrieved from the
    # knowledge base under the filter helper's well-known value name.
    find_specs = parser_mediator.knowledge_base.GetValue(
        artifact_filters.ArtifactDefinitionsFilterHelper.KNOWLEDGE_BASE_VALUE)

    registry_find_specs = None
    if find_specs:
      registry_find_specs = find_specs.get(
          artifact_types.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY)

    key_path_compatible = (
        artifact_filters.ArtifactDefinitionsFilterHelper.CheckKeyCompatibility(
            key_path_prefix))

    if registry_find_specs and key_path_compatible:
      try:
        # NOTE(review): MapFile appears to hand registry_file over to
        # win_registry; confirm win_registry is responsible for closing it.
        win_registry.MapFile(key_path_prefix, registry_file)
        self._ParseKeysFromFindSpecs(
            parser_mediator, win_registry, registry_find_specs)
      except IOError as exception:
        parser_mediator.ProduceExtractionError('{0!s}'.format(exception))

    else:
      # Fall back to parsing the entire file when there are no filters or
      # when the filters do not support this key path prefix.
      if registry_find_specs and not key_path_compatible:
        logger.warning((
            'Artifacts Registry Filters are not supported for '
            'the registry prefix {0:s}. Parsing entire file.').format(
                key_path_prefix))

      try:
        self._ParseRecurseKeys(parser_mediator, root_key)
      except IOError as exception:
        parser_mediator.ProduceExtractionError('{0!s}'.format(exception))
Exemple #15
0
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Windows Registry file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
    """
        win_registry_reader = FileObjectWinRegistryFileReader()

        try:
            registry_file = win_registry_reader.Open(file_object)
        except IOError as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to open Windows Registry file with error: {0!s}'.
                format(exception))
            return

        win_registry = dfwinreg_registry.WinRegistry()

        # Determine the Windows Registry key path prefix for this file.
        key_path_prefix = win_registry.GetRegistryFileMapping(registry_file)
        registry_file.SetKeyPathPrefix(key_path_prefix)
        root_key = registry_file.GetRootKey()
        if not root_key:
            return

        # Collection filters are optional; when absent the whole file is
        # parsed recursively.
        registry_find_specs = getattr(
            parser_mediator.collection_filters_helper, 'registry_find_specs',
            None)

        if not registry_find_specs:
            try:
                self._ParseRecurseKeys(parser_mediator, root_key)
            except IOError as exception:
                parser_mediator.ProduceExtractionWarning(
                    '{0!s}'.format(exception))

        else:
            artifacts_filters_helper = (
                artifact_filters.ArtifactDefinitionsFiltersHelper)
            if not artifacts_filters_helper.CheckKeyCompatibility(
                    key_path_prefix):
                # Note that in this branch nothing is parsed.
                logger.warning((
                    'Artifacts filters are not supported for Windows Registry file '
                    'with key path prefix: "{0:s}".').format(key_path_prefix))

            else:
                try:
                    # NOTE(review): MapFile appears to hand registry_file over
                    # to win_registry; confirm win_registry closes it.
                    win_registry.MapFile(key_path_prefix, registry_file)
                    self._ParseKeysFromFindSpecs(parser_mediator, win_registry,
                                                 registry_find_specs)
                except IOError as exception:
                    parser_mediator.ProduceExtractionWarning(
                        '{0!s}'.format(exception))
Exemple #16
0
    def _ParseLogLine(self, parser_mediator, structure, key):
        """Parse a single log line and produce an event object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      structure (pyparsing.ParseResults): structure of tokens derived from
          a line of a text file.
      key (str): identifier of the structure of tokens.
    """
        time_elements_tuple = self._GetTimeElementsTuple(structure)

        try:
            date_time = dfdatetime_time_elements.TimeElements(
                time_elements_tuple=time_elements_tuple)
        except ValueError:
            parser_mediator.ProduceExtractionError(
                'invalid date time value: {0!s}'.format(structure.date_time))
            return

        # Element 1 of the tuple is stored as the last seen month; presumably
        # used elsewhere to detect a year rollover — TODO confirm.
        self._last_month = time_elements_tuple[1]

        # If the actual entry is a repeated entry, we take the basic information
        # from the previous entry, but use the timestamp from the actual entry.
        if key == 'logline':
            self._previous_structure = structure
        else:
            structure = self._previous_structure

        # Pyparsing reads in RAW, but the text is in UTF8; assumes
        # structure.action is a byte string — TODO confirm on Python 3.
        try:
            action = structure.action.decode('utf-8')
        except UnicodeDecodeError:
            logger.warning(
                'Decode UTF8 failed, the message string may be cut short.')
            action = structure.action.decode('utf-8', 'ignore')

        event_data = MacAppFirewallLogEventData()
        event_data.action = action
        event_data.agent = structure.agent
        event_data.computer_name = structure.computer_name
        # Due to the use of CharsNotIn pyparsing structure contains whitespaces
        # that need to be removed.
        event_data.process_name = structure.process_name.strip()
        event_data.status = structure.status

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_ADDED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Exemple #17
0
  def ParseFileObject(self, parser_mediator, file_object):
    """Parses a Windows Registry file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
    """
    win_registry_reader = FileObjectWinRegistryFileReader()

    try:
      registry_file = win_registry_reader.Open(file_object)
    except IOError as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to open Windows Registry file with error: {0!s}'.format(
              exception))
      return

    win_registry = dfwinreg_registry.WinRegistry()

    # Determine the Windows Registry key path prefix for this file.
    key_path_prefix = win_registry.GetRegistryFileMapping(registry_file)
    registry_file.SetKeyPathPrefix(key_path_prefix)
    root_key = registry_file.GetRootKey()
    if not root_key:
      return

    # Collection filters are optional; when absent the whole file is parsed
    # recursively.
    registry_find_specs = getattr(
        parser_mediator.collection_filters_helper, 'registry_find_specs', None)

    if not registry_find_specs:
      try:
        self._ParseRecurseKeys(parser_mediator, root_key)
      except IOError as exception:
        parser_mediator.ProduceExtractionWarning('{0!s}'.format(exception))

    else:
      artifacts_filters_helper = (
          artifact_filters.ArtifactDefinitionsFiltersHelper)
      if not artifacts_filters_helper.CheckKeyCompatibility(key_path_prefix):
        # Note that in this branch nothing is parsed.
        logger.warning((
            'Artifacts filters are not supported for Windows Registry file '
            'with key path prefix: "{0:s}".').format(key_path_prefix))

      else:
        try:
          # NOTE(review): MapFile appears to hand registry_file over to
          # win_registry; confirm win_registry closes it.
          win_registry.MapFile(key_path_prefix, registry_file)
          self._ParseKeysFromFindSpecs(
              parser_mediator, win_registry, registry_find_specs)
        except IOError as exception:
          parser_mediator.ProduceExtractionWarning('{0!s}'.format(exception))
Exemple #18
0
    def _RawToUTF8(self, byte_stream):
        """Copies a UTF-8 byte stream into a Unicode string.

    Args:
      byte_stream (bytes): byte stream containing an UTF-8 encoded string.

    Returns:
      str: A Unicode string.
    """
        try:
            string = byte_stream.decode('utf-8')
        except UnicodeDecodeError:
            logger.warning(
                'Decode UTF8 failed, the message string may be cut short.')
            string = byte_stream.decode('utf-8', errors='ignore')
        return string.partition(b'\x00')[0]
Exemple #19
0
    def GetEntries(self, parser_mediator, cache=None, database=None, **kwargs):
        """Extracts event objects from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache.
      database (Optional[pyesedb.file]): ESE database.

    Raises:
      ValueError: If the database attribute is not valid.
    """
        if database is None:
            raise ValueError('Invalid database.')

        for table_name, callback_method in sorted(self._tables.items()):
            if parser_mediator.abort:
                break

            # Table names without a callback method are allowed to improve
            # the detection of a database based on its table names.
            if not callback_method:
                continue

            callback_function = getattr(self, callback_method, None)
            if callback_function is None:
                logger.warning(
                    '[{0:s}] missing callback method: {1:s} for table: {2:s}'.
                    format(self.NAME, callback_method, table_name))
                continue

            esedb_table = database.get_table_by_name(table_name)
            if not esedb_table:
                if table_name not in self.OPTIONAL_TABLES:
                    logger.warning('[{0:s}] missing table: {1:s}'.format(
                        self.NAME, table_name))
                continue

            # The database is passed in case it contains table names that are
            # assigned dynamically and cannot be defined by the table
            # name-callback mechanism.
            callback_function(
                parser_mediator, cache=cache, database=database,
                table=esedb_table, **kwargs)
Exemple #20
0
    def _ParseMRUListEntryValue(self, parser_mediator, registry_key,
                                entry_index, entry_letter, **kwargs):
        """Parses the MRUList entry value.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key that contains
           the MRUList value.
      entry_index (int): MRUList entry index.
      entry_letter (str): character value representing the entry.

    Returns:
      str: MRUList entry value, or an empty string if the value is missing or
          of an unsupported data type.
    """
        value = registry_key.GetValueByName('{0:s}'.format(entry_letter))
        if value is None:
            logger.debug(
                '[{0:s}] Missing MRUList entry value: {1:s} in key: {2:s}.'.
                format(self.NAME, entry_letter, registry_key.path))
            return ''

        if value.DataIsString():
            return value.GetDataAsObject()

        if not value.DataIsBinaryData():
            return ''

        logger.debug((
            '[{0:s}] Non-string MRUList entry value: {1:s} parsed as string '
            'in key: {2:s}.').format(self.NAME, entry_letter,
                                     registry_key.path))
        utf16_stream = binary.ByteStreamCopyToUTF16Stream(value.data)

        try:
            value_string = utf16_stream.decode('utf-16-le')
        except UnicodeDecodeError as exception:
            value_string = binary.HexifyBuffer(utf16_stream)
            logger.warning((
                '[{0:s}] Unable to decode UTF-16 stream: {1:s} in MRUList entry '
                'value: {2:s} in key: {3:s} with error: {4!s}').format(
                    self.NAME, value_string, entry_letter,
                    registry_key.path, exception))

        return value_string
Exemple #21
0
    def _CopyUtf8ByteArrayToString(self, byte_array):
        """Copies a UTF-8 encoded byte array into a Unicode string.

    Args:
      byte_array (bytes): A byte array containing an UTF-8 encoded string.

    Returns:
      str: A Unicode string.
    """
        byte_stream = b''.join(map(chr, byte_array))

        try:
            string = byte_stream.decode('utf-8')
        except UnicodeDecodeError:
            logger.warning('Unable to decode UTF-8 formatted byte array.')
            string = byte_stream.decode('utf-8', errors='ignore')

        string, _, _ = string.partition(b'\x00')
        return string
Exemple #22
0
    def Process(self,
                parser_mediator,
                cache=None,
                database=None,
                **unused_kwargs):
        """Extracts events from a SQLite database.

    Runs each query defined in QUERIES against the database and dispatches
    the results to the query's callback method.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cache (Optional[SQLiteCache]): cache.
      database (Optional[SQLiteDatabase]): database.

    Raises:
      ValueError: If the database or cache value are missing.
    """
        if cache is None:
            raise ValueError('Missing cache value.')

        if database is None:
            raise ValueError('Missing database value.')

        # This will raise if unhandled keyword arguments are passed.
        super(SQLitePlugin, self).Process(parser_mediator)

        for query, callback_method in self.QUERIES:
            if parser_mediator.abort:
                break

            # Resolve the callback method by name; skip queries whose
            # callback is not defined on this plugin.
            callback = getattr(self, callback_method, None)
            if callback is None:
                logger.warning(
                    '[{0:s}] missing callback method: {1:s} for query: {2:s}'.
                    format(self.NAME, callback_method, query))
                continue

            self._ParseQuery(parser_mediator, database, query, callback, cache)
Exemple #23
0
  def GetEntries(self, parser_mediator, cache=None, database=None, **kwargs):
    """Extracts event objects from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache.
      database (Optional[pyesedb.file]): ESE database.

    Raises:
      ValueError: If the database attribute is not valid.
    """
    if database is None:
      raise ValueError('Invalid database.')

    for table_name, method_name in self._tables.items():
      if parser_mediator.abort:
        break

      # Table names without a callback method are allowed to improve
      # the detection of a database based on its table names.
      if not method_name:
        continue

      table_callback = getattr(self, method_name, None)
      if table_callback is None:
        logger.warning(
            '[{0:s}] missing callback method: {1:s} for table: {2:s}'.format(
                self.NAME, method_name, table_name))
        continue

      table = database.get_table_by_name(table_name)
      if not table:
        logger.warning('[{0:s}] missing table: {1:s}'.format(
            self.NAME, table_name))
        continue

      # The database is passed in case the database contains table names
      # that are assigned dynamically and cannot be defined by
      # the table name-callback mechanism.
      table_callback(
          parser_mediator, cache=cache, database=database, table=table,
          **kwargs)
Exemple #24
0
  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    for subkey in registry_key.GetSubkeys():
      values_dict = {'subkey_name': subkey.name}

      vendor = None
      product = None
      try:
        name_parts = subkey.name.split('&')
        if len(name_parts) >= 2:
          vendor = name_parts[0]
          product = name_parts[1]
      except ValueError as exception:
        logger.warning(
            'Unable to split string: {0:s} with error: {1!s}'.format(
                subkey.name, exception))

      if vendor and product:
        values_dict['vendor'] = vendor
        values_dict['product'] = product

      for device_subkey in subkey.GetSubkeys():
        values_dict['serial'] = device_subkey.name

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = values_dict
        event_data.source_append = self._SOURCE_APPEND

        # Last USB connection per USB device recorded in the Registry.
        event = time_events.DateTimeValuesEvent(
            device_subkey.last_written_time,
            definitions.TIME_DESCRIPTION_LAST_CONNECTED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Exemple #25
0
    def __init__(self):
        """Initializes a parser."""
        super(WinRegistryParser, self).__init__()
        self._plugin_per_key_path = {}
        self._plugins_without_key_paths = []

        default_plugin_index = None
        key_paths = []

        for plugin_index, plugin in enumerate(self._plugins):
            # The default plugin is handled separately below.
            if plugin.NAME == 'winreg_default':
                default_plugin_index = plugin_index
                continue

            for key_filter in plugin.FILTERS:
                filter_key_paths = getattr(key_filter, 'key_paths', [])
                if not filter_key_paths:
                    # Track plugins that have filters without key paths.
                    if plugin not in self._plugins_without_key_paths:
                        self._plugins_without_key_paths.append(plugin)
                    continue

                for key_path in filter_key_paths:
                    key_path = key_path.lower()
                    existing_plugin = self._plugin_per_key_path.get(
                        key_path, None)
                    if existing_plugin is not None:
                        logger.warning((
                            'Windows Registry key path: {0:s} defined by plugin: {1:s} '
                            'already set by plugin: {2:s}'
                        ).format(key_path, plugin.NAME, existing_plugin.NAME))
                        continue

                    self._plugin_per_key_path[key_path] = plugin
                    key_paths.append(key_path)

        if default_plugin_index is not None:
            self._default_plugin = self._plugins.pop(default_plugin_index)

        self._path_filter = path_filter.PathFilterScanTree(
            key_paths, case_sensitive=False, path_segment_separator='\\')
Exemple #26
0
  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    for subkey in registry_key.GetSubkeys():
      values_dict = {}
      values_dict['subkey_name'] = subkey.name

      vendor_identification = None
      product_identification = None
      try:
        identifiers = subkey.name.split('&')
        if len(identifiers) >= 2:
          vendor_identification = identifiers[0]
          product_identification = identifiers[1]
      except ValueError as exception:
        logger.warning(
            'Unable to split string: {0:s} with error: {1!s}'.format(
                subkey.name, exception))

      if vendor_identification and product_identification:
        values_dict['vendor'] = vendor_identification
        values_dict['product'] = product_identification

      for usb_device_key in subkey.GetSubkeys():
        values_dict['serial'] = usb_device_key.name

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = values_dict
        event_data.source_append = self._SOURCE_APPEND

        # Last USB connection per USB device recorded in the Registry.
        event = time_events.DateTimeValuesEvent(
            usb_device_key.last_written_time,
            definitions.TIME_DESCRIPTION_LAST_CONNECTED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Exemple #27
0
    def ParseRecord(self, parser_mediator, key, structure):
        """Parses a log record structure.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): name of the parsed structure.
      structure (pyparsing.ParseResults): structure parsed from the log file.
    """
        # Only 'logline' structures carry records this parser understands.
        if key != 'logline':
            logger.warning(
                'Unable to parse record, unknown structure: {0:s}'.format(key))
            return

        raw_timestamp = self._GetValueFromStructure(structure, 'timestamp')
        try:
            timestamp = int(raw_timestamp, 10)
        except (TypeError, ValueError):
            logger.debug(
                'Invalid timestamp {0!s}, skipping record'.format(
                    raw_timestamp))
            return

        try:
            raw_text = self._GetValueFromStructure(
                structure, 'text', default_value='')
            nickname, text = self._StripThenGetNicknameAndText(raw_text)
        except pyparsing.ParseException:
            logger.debug('Error parsing entry at offset {0:d}'.format(
                self._offset))
            return

        event_data = XChatScrollbackEventData()
        event_data.nickname = nickname
        event_data.offset = self._offset
        event_data.text = text

        posix_date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            posix_date_time, definitions.TIME_DESCRIPTION_ADDED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Exemple #28
0
  def __init__(self):
    """Initializes a parser object."""
    super(WinRegistryParser, self).__init__()
    self._plugin_per_key_path = {}
    self._plugins_without_key_paths = []

    default_plugin_list_index = None
    key_paths = []

    for list_index, plugin in enumerate(self._plugins):
      # The default plugin is popped from the plugin list below.
      if plugin.NAME == 'winreg_default':
        default_plugin_list_index = list_index
        continue

      for registry_key_filter in plugin.FILTERS:
        plugin_key_paths = getattr(registry_key_filter, 'key_paths', [])

        if not plugin_key_paths:
          # Remember plugins whose filters carry no key paths.
          if plugin not in self._plugins_without_key_paths:
            self._plugins_without_key_paths.append(plugin)
          continue

        for plugin_key_path in plugin_key_paths:
          plugin_key_path = plugin_key_path.lower()
          conflicting_plugin = self._plugin_per_key_path.get(
              plugin_key_path, None)
          if conflicting_plugin is not None:
            logger.warning((
                'Windows Registry key path: {0:s} defined by plugin: {1:s} '
                'already set by plugin: {2:s}').format(
                    plugin_key_path, plugin.NAME, conflicting_plugin.NAME))
          else:
            self._plugin_per_key_path[plugin_key_path] = plugin
            key_paths.append(plugin_key_path)

    if default_plugin_list_index is not None:
      self._default_plugin = self._plugins.pop(default_plugin_list_index)

    self._path_filter = path_filter.PathFilterScanTree(
        key_paths, case_sensitive=False, path_segment_separator='\\')
Exemple #29
0
  def Process(
      self, parser_mediator, cache=None, database=None, **unused_kwargs):
    """Extracts events from a SQLite database.

    Runs each query defined in QUERIES against the database and dispatches
    the results to the query's callback method.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cache (Optional[SQLiteCache]): cache.
      database (Optional[SQLiteDatabase]): database.

    Raises:
      ValueError: If the database or cache value are missing.
    """
    if cache is None:
      raise ValueError('Missing cache value.')

    if database is None:
      raise ValueError('Missing database value.')

    # This will raise if unhandled keyword arguments are passed.
    super(SQLitePlugin, self).Process(parser_mediator)

    for query, callback_method in self.QUERIES:
      if parser_mediator.abort:
        break

      # Resolve the callback method by name; skip queries whose callback
      # is not defined on this plugin.
      callback = getattr(self, callback_method, None)
      if callback is None:
        logger.warning(
            '[{0:s}] missing callback method: {1:s} for query: {2:s}'.format(
                self.NAME, callback_method, query))
        continue

      self._ParseQuery(parser_mediator, database, query, callback, cache)
Exemple #30
0
    def _GetEarliestYearFromFileEntry(self):
        """Retrieves the year from the file entry date and time values.

    This function uses the creation time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None.
    """
        file_entry = self.GetFileEntry()
        if not file_entry:
            return None

        stat_object = file_entry.GetStat()

        posix_time = getattr(stat_object, 'crtime', None)
        if posix_time is None:
            posix_time = getattr(stat_object, 'ctime', None)

        # Gzip files don't store the creation or metadata modification times,
        # but the modification time stored in the file is a good proxy.
        if file_entry.TYPE_INDICATOR == dfvfs_definitions.TYPE_INDICATOR_GZIP:
            posix_time = getattr(stat_object, 'mtime', None)

        if posix_time is None:
            logger.warning(
                'Unable to determine earliest year from file stat information.'
            )
            return None

        try:
            year = timelib.GetYearFromPosixTime(
                posix_time, timezone=self._knowledge_base.timezone)
            return year
        except ValueError as exception:
            # Use the !s conversion, not the :s format spec: applying :s to
            # an exception object raises TypeError, which would mask the
            # original error instead of logging it.
            logger.error((
                'Unable to determine earliest year from file stat information with '
                'error: {0!s}').format(exception))
            return None
Exemple #31
0
    def _GetTimestamps(self, olecf_item):
        """Retrieves the timestamps from an OLECF item.

    Args:
      olecf_item (pyolecf.item): OLECF item.

    Returns:
      tuple[int, int]: creation and modification FILETIME timestamp.
    """
        if not olecf_item:
            return None, None

        try:
            creation_time = olecf_item.get_creation_time_as_integer()
        except OverflowError as exception:
            logger.warning(
                'Unable to read the creation time with error: {0!s}'.format(
                    exception))
            creation_time = 0

        try:
            modification_time = olecf_item.get_modification_time_as_integer()
        except OverflowError as exception:
            logger.warning(
                'Unable to read the modification time with error: {0!s}'.
                format(exception))
            modification_time = 0

        # If no useful events, return early.
        if not creation_time and not modification_time:
            return None, None

        # Office template documents sometimes contain a creation time
        # of -1 (0xffffffffffffffff).
        if creation_time == 0xffffffffffffffff:
            creation_time = 0

        return creation_time, modification_time
Exemple #32
0
  def _GetTimestamps(self, olecf_item):
    """Retrieves the timestamps from an OLECF item.

    Args:
      olecf_item (pyolecf.item): OLECF item.

    Returns:
      tuple[int, int]: creation and modification FILETIME timestamp.
    """
    if not olecf_item:
      return None, None

    try:
      creation_time = olecf_item.get_creation_time_as_integer()
    except OverflowError as exception:
      logger.warning(
          'Unable to read the creation time with error: {0!s}'.format(
              exception))
      creation_time = 0

    try:
      modification_time = olecf_item.get_modification_time_as_integer()
    except OverflowError as exception:
      logger.warning(
          'Unable to read the modification time with error: {0!s}'.format(
              exception))
      modification_time = 0

    # If no useful events, return early.
    if not creation_time and not modification_time:
      return None, None

    # Office template documents sometimes contain a creation time
    # of -1 (0xffffffffffffffff).
    if creation_time == 0xffffffffffffffff:
      creation_time = 0

    return creation_time, modification_time
Exemple #33
0
  def _GetEarliestYearFromFileEntry(self):
    """Retrieves the year from the file entry date and time values.

    This function uses the creation time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None.
    """
    file_entry = self.GetFileEntry()
    if not file_entry:
      return None

    stat_object = file_entry.GetStat()

    if file_entry.TYPE_INDICATOR == dfvfs_definitions.TYPE_INDICATOR_GZIP:
      # Gzip files don't store the creation or metadata modification times,
      # but the modification time stored in the file is a good proxy.
      posix_time = getattr(stat_object, 'mtime', None)
    else:
      posix_time = getattr(stat_object, 'crtime', None)
      if posix_time is None:
        posix_time = getattr(stat_object, 'ctime', None)

    if posix_time is None:
      logger.warning(
          'Unable to determine earliest year from file stat information.')
      return None

    try:
      return timelib.GetYearFromPosixTime(
          posix_time, timezone=self._knowledge_base.timezone)
    except ValueError as exception:
      logger.error((
          'Unable to determine earliest year from file stat information with '
          'error: {0!s}').format(exception))
      return None
Exemple #34
0
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        # NOTE(review): a single event_data instance is created once and then
        # mutated for every subkey and device key while multiple events
        # referencing it are produced; assumes ProduceEventWithEventData
        # copies or serializes the event data immediately — TODO confirm.
        event_data = WindowsUSBDeviceEventData()
        # Normalize the key path to use forward slashes.
        event_data.key_path = (registry_key.path).replace("\\", "/")

        for subkey in registry_key.GetSubkeys():
            event_data.subkey_name = subkey.name

            vendor_identification = None
            product_identification = None
            try:
                subkey_name_parts = subkey.name.split('&')
                if len(subkey_name_parts) >= 2:
                    vendor_identification = subkey_name_parts[0]
                    product_identification = subkey_name_parts[1]
            except ValueError as exception:
                logger.warning(
                    'Unable to split string: {0:s} with error: {1!s}'.format(
                        subkey.name, exception))

            if vendor_identification and product_identification:
                event_data.vendor = vendor_identification
                event_data.product = product_identification

            for devicekey in subkey.GetSubkeys():
                event_data.serial = devicekey.name

                # Last USB connection per USB device recorded in the Registry.
                event = time_events.DateTimeValuesEvent(
                    devicekey.last_written_time,
                    definitions.TIME_DESCRIPTION_LAST_CONNECTED)
                parser_mediator.ProduceEventWithEventData(event, event_data)
Exemple #35
0
    def _GetLatestYearFromFileEntry(self):
        """Retrieves the maximum (highest value) year from the file entry.

    This function uses the modification time if available otherwise the change
    time (metadata last modification time) is used.

    Returns:
      int: year of the file entry or None if the year cannot be retrieved.
    """
        file_entry = self.GetFileEntry()
        if not file_entry:
            return None

        date_time = file_entry.modification_time
        if not date_time:
            date_time = file_entry.change_time

        if date_time is None:
            logger.warning('File entry has no modification or change time.')
            return None

        year, _, _ = date_time.GetDate()
        return year
Exemple #36
0
  def ParseRecord(self, parser_mediator, key, structure):
    """Parses a log record structure.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): name of the parsed structure.
      structure (pyparsing.ParseResults): structure parsed from the log file.
    """
    # Only 'logline' structures carry scrollback records.
    if key != 'logline':
      logger.warning(
          'Unable to parse record, unknown structure: {0:s}'.format(key))
      return

    timestamp = self._GetValueFromStructure(structure, 'timestamp')
    try:
      posix_timestamp = int(timestamp, 10)
    except (TypeError, ValueError):
      logger.debug('Invalid timestamp {0!s}, skipping record'.format(timestamp))
      return

    try:
      raw_text = self._GetValueFromStructure(
          structure, 'text', default_value='')
      nickname, text = self._StripThenGetNicknameAndText(raw_text)
    except pyparsing.ParseException:
      logger.debug('Error parsing entry at offset {0:d}'.format(self._offset))
      return

    event_data = XChatScrollbackEventData()
    event_data.nickname = nickname
    event_data.offset = self._offset
    event_data.text = text

    date_time = dfdatetime_posix_time.PosixTime(timestamp=posix_timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_ADDED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Exemple #37
0
    def _GetTextFromNullTerminatedString(self,
                                         null_terminated_string,
                                         default_string='N/A'):
        """Get a UTF-8 text from a raw null terminated string.

    Args:
      null_terminated_string: Raw string terminated with null character.
      default_string: The default string returned if the parser fails.

    Returns:
      A decoded UTF-8 string or if unable to decode, the supplied default
      string.
    """
        text, _, _ = null_terminated_string.partition(b'\x00')
        try:
            text = text.decode('utf-8')
        except UnicodeDecodeError:
            logger.warning(
                '[UTMP] Decode UTF8 failed, the message string may be cut short.'
            )
            text = text.decode('utf-8', 'ignore')
        if not text:
            return default_string
        return text
Exemple #38
0
    def _ReduceParserFilters(cls, includes, excludes):
        """Reduces the parsers and plugins to include and exclude.

    If an intersection is found, the parser or plugin is removed from
    the inclusion set. If a parser is not in inclusion set there is no need
    to have it in the exclusion set.

    Args:
      includes (dict[str, BaseParser]): included parsers and plugins by name.
      excludes (dict[str, BaseParser]): excluded parsers and plugins by name.
    """
        if not includes or not excludes:
            return

        for parser_name in set(includes).intersection(excludes):
            # Check parser and plugin list for exact equivalence.
            if includes[parser_name] == excludes[parser_name]:
                logger.warning(
                    'Parser {0:s} was in both the inclusion and exclusion lists. '
                    'Ignoring included parser.'.format(parser_name))
                includes.pop(parser_name)
                continue

            # Remove plugins that defined are in both inclusion and exclusion lists.
            plugin_includes = includes[parser_name]
            plugin_excludes = excludes[parser_name]
            intersection = set(plugin_includes).intersection(plugin_excludes)
            if not intersection:
                continue

            logger.warning(
                'Parser {0:s} plugins: {1:s} in both the inclusion and exclusion '
                'lists. Ignoring included plugins.'.format(
                    parser_name, ', '.join(intersection)))
            plugins_list = list(set(plugin_includes).difference(intersection))
            includes[parser_name] = plugins_list

        # Remove excluded parsers that do not run.
        parsers_to_pop = []
        for parser_name in excludes:
            if parser_name in includes:
                continue

            logger.warning(
                'The excluded parser: {0:s} is not associated with the included '
                'parsers: {1:s}. Ignoring excluded parser.'.format(
                    parser_name, ', '.join(includes.keys())))
            parsers_to_pop.append(parser_name)

        for parser_name in parsers_to_pop:
            excludes.pop(parser_name)
Exemple #39
0
  def _ReduceParserFilters(cls, includes, excludes):
    """Reduces the parsers and plugins to include and exclude.

    If an intersection is found, the parser or plugin is removed from
    the inclusion set. If a parser is not in inclusion set there is no need
    to have it in the exclusion set.

    Args:
      includes (dict[str, BaseParser]): included parsers and plugins by name.
      excludes (dict[str, BaseParser]): excluded parsers and plugins by name.
    """
    if not includes or not excludes:
      return

    for parser_name in set(includes).intersection(excludes):
      # Check parser and plugin list for exact equivalence.
      if includes[parser_name] == excludes[parser_name]:
        logger.warning(
            'Parser {0:s} was in both the inclusion and exclusion lists. '
            'Ignoring included parser.'.format(parser_name))
        includes.pop(parser_name)
        continue

      # Remove plugins that defined are in both inclusion and exclusion lists.
      plugin_includes = includes[parser_name]
      plugin_excludes = excludes[parser_name]
      intersection = set(plugin_includes).intersection(plugin_excludes)
      if not intersection:
        continue

      logger.warning(
          'Parser {0:s} plugins: {1:s} in both the inclusion and exclusion '
          'lists. Ignoring included plugins.'.format(
              parser_name, ', '.join(intersection)))
      plugins_list = list(set(plugin_includes).difference(intersection))
      includes[parser_name] = plugins_list

    # Remove excluded parsers that do not run.
    parsers_to_pop = []
    for parser_name in excludes:
      if parser_name in includes:
        continue

      logger.warning(
          'The excluded parser: {0:s} is not associated with the included '
          'parsers: {1:s}. Ignoring excluded parser.'.format(
              parser_name, ', '.join(includes.keys())))
      parsers_to_pop.append(parser_name)

    for parser_name in parsers_to_pop:
      excludes.pop(parser_name)
Exemple #40
0
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        for subkey in registry_key.GetSubkeys():
            subkey_name = subkey.name

            # The subkey name is expected to encode, &-separated:
            # device type, vendor, product and revision.
            name_values = subkey_name.split('&')
            number_of_name_values = len(name_values)

            # Normally we expect 4 fields here however that is not always the case.
            if number_of_name_values != 4:
                logger.warning(
                    'Expected 4 &-separated values in: {0:s}'.format(
                        subkey_name))

            event_data = USBStorEventData()
            event_data.key_path = registry_key.path
            event_data.subkey_name = subkey_name

            # Populate whichever of the 4 fields are present.
            if number_of_name_values >= 1:
                event_data.device_type = name_values[0]
            if number_of_name_values >= 2:
                event_data.vendor = name_values[1]
            if number_of_name_values >= 3:
                event_data.product = name_values[2]
            if number_of_name_values >= 4:
                event_data.revision = name_values[3]

            if subkey.number_of_subkeys == 0:
                # Time last USB device of this class was first inserted.
                event = time_events.DateTimeValuesEvent(
                    subkey.last_written_time,
                    definitions.TIME_DESCRIPTION_WRITTEN)
                parser_mediator.ProduceEventWithEventData(event, event_data)
                continue

            # NOTE(review): the same event_data instance is mutated for every
            # device key below while multiple events referencing it are
            # produced; assumes ProduceEventWithEventData copies or serializes
            # the event data immediately — TODO confirm.
            for device_key in subkey.GetSubkeys():
                event_data.serial = device_key.name

                friendly_name_value = device_key.GetValueByName('FriendlyName')
                if friendly_name_value:
                    event_data.display_name = friendly_name_value.GetDataAsObject(
                    )

                # ParentIdPrefix applies to Windows XP Only.
                parent_id_prefix_value = device_key.GetValueByName(
                    'ParentIdPrefix')
                if parent_id_prefix_value:
                    event_data.parent_id_prefix = parent_id_prefix_value.GetDataAsObject(
                    )

                # Time last USB device of this class was first inserted.
                event = time_events.DateTimeValuesEvent(
                    subkey.last_written_time,
                    definitions.TIME_DESCRIPTION_WRITTEN)
                parser_mediator.ProduceEventWithEventData(event, event_data)

                # Win7 - Last Connection.
                # Vista/XP - Time of an insert.
                event = time_events.DateTimeValuesEvent(
                    device_key.last_written_time,
                    definitions.TIME_DESCRIPTION_WRITTEN)
                parser_mediator.ProduceEventWithEventData(event, event_data)

                # Last written time of the 'Device Parameters' subkey, if any.
                device_parameter_key = device_key.GetSubkeyByName(
                    'Device Parameters')
                if device_parameter_key:
                    event = time_events.DateTimeValuesEvent(
                        device_parameter_key.last_written_time,
                        definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                # Last written time of the 'LogConf' subkey, if any.
                log_configuration_key = device_key.GetSubkeyByName('LogConf')
                if log_configuration_key:
                    event = time_events.DateTimeValuesEvent(
                        log_configuration_key.last_written_time,
                        definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

                # Last written time of the 'Properties' subkey, if any.
                properties_key = device_key.GetSubkeyByName('Properties')
                if properties_key:
                    event = time_events.DateTimeValuesEvent(
                        properties_key.last_written_time,
                        definitions.TIME_DESCRIPTION_WRITTEN)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
Exemple #41
0
  def _GetRecordValues(
      self, parser_mediator, table_name, record, value_mappings=None):
    """Retrieves the values from the record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table.
      record (pyesedb.record): ESE record.
      value_mappings (Optional[dict[str, str]): value mappings, which map
          the column name to a callback method.

    Returns:
      dict[str,object]: values per column name.
    """
    record_values = {}

    for value_entry in range(0, record.number_of_values):
      if parser_mediator.abort:
        break

      column_name = record.get_column_name(value_entry)
      if column_name in record_values:
        logger.warning(
            '[{0:s}] duplicate column: {1:s} in table: {2:s}'.format(
                self.NAME, column_name, table_name))
        continue

      value_callback = None
      if value_mappings and column_name in value_mappings:
        value_callback_method = value_mappings.get(column_name)
        if value_callback_method:
          value_callback = getattr(self, value_callback_method, None)
          if value_callback is None:
            logger.warning((
                '[{0:s}] missing value callback method: {1:s} for column: '
                '{2:s} in table: {3:s}').format(
                    self.NAME, value_callback_method, column_name, table_name))

      if value_callback:
        try:
          value_data = record.get_value_data(value_entry)
          value = value_callback(value_data)

        except Exception as exception:  # pylint: disable=broad-except
          logger.error(exception)
          value = None
          parser_mediator.ProduceExtractionWarning((
              'unable to parse value: {0:s} with callback: {1:s} with error: '
              '{2!s}').format(column_name, value_callback_method, exception))

      else:
        try:
          value = self._GetRecordValue(record, value_entry)
        except ValueError as exception:
          value = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse value: {0:s} with error: {1!s}'.format(
                  column_name, exception))

      record_values[column_name] = value

    return record_values
Exemple #42
0
    def TryWithUntestedStructures(self, file_object, token_id, pending):
        """Tries to parse the pending part of a BSM entry with untested structures.

        Starting from the current file position, repeatedly parses tokens from
        the known-structure table until the pending bytes are consumed or an
        unknown token is hit.

        Args:
          file_object: BSM file-like object, positioned at the start of the
              pending data.
          token_id: integer identifier of the unknown token that triggered
              this fallback.
          pending: number of bytes of the entry that remain to be parsed.

        Returns:
          A dict of extra token data parsed using the non-tested structures;
          an empty dict with a "can not save" message when the entry could
          not be fully consumed; or None when a token identifier could not
          be read at all.
        """
        # Remember where the unknown data starts so we can detect whether the
        # whole pending span was consumed, and report the original token.
        start_position = file_object.tell()
        start_token_id = token_id
        extra_tokens = {}

        # Read all the "pending" bytes.
        try:
            if token_id in self._bsm_type_list_all:
                token = self._bsm_type_list_all[token_id][1].parse_stream(
                    file_object)
                new_extra_tokens = self.FormatToken(token_id, token,
                                                    file_object)
                extra_tokens.update(new_extra_tokens)
                while file_object.tell() < (start_position + pending):
                    # Check if it is a known token.
                    try:
                        token_id = self._BSM_TOKEN.parse_stream(file_object)
                    except (IOError, construct.FieldError):
                        logger.warning(
                            'Unable to parse the Token ID at position: {0:d}'.
                            format(file_object.tell()))
                        return None
                    if token_id not in self._bsm_type_list_all:
                        break
                    token = self._bsm_type_list_all[token_id][1].parse_stream(
                        file_object)
                    new_extra_tokens = self.FormatToken(
                        token_id, token, file_object)
                    extra_tokens.update(new_extra_tokens)
        except (IOError, construct.FieldError):
            # A structure failed to parse mid-stream; 255 marks the token as
            # unidentified for the warning below.
            token_id = 255

        next_entry = (start_position + pending)
        if file_object.tell() != next_entry:
            # The pending span was not consumed exactly: unknown structure.
            logger.warning(
                'Unknown Token at "0x{0:X}", ID: {1} (0x{2:X})'.format(
                    start_position - 1, token_id, token_id))
            # TODO: another way to save this information must be found.
            extra_tokens.update({
                'message':
                self.MESSAGE_CAN_NOT_SAVE.format(start_position - 1,
                                                 start_token_id)
            })
            # Skip ahead (or back) to the start of the next entry.
            file_object.seek(next_entry - file_object.tell(), os.SEEK_CUR)
            # Return an empty result because it is unknown which structure was
            # the incorrect one that prevented reaching the expected end of
            # the entry; the partially parsed tokens are discarded.
            return {}
        return extra_tokens
Exemple #43
0
    def _ParseBSMEvent(self, parser_mediator, file_object):
        """Parses a BSM entry (BSMEvent) from the file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between
              parsers and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): a file-like object, positioned at the
              start of a BSM record.

        Returns:
          bool: True if the BSM entry was parsed.
        """
        record_start_offset = file_object.tell()

        try:
            token_type = self._BSM_TOKEN.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            # NOTE: '{1!s}' (not '{1:s}') is required: exceptions do not
            # support a string format spec and '{1:s}' raises TypeError.
            parser_mediator.ProduceExtractionError((
                'unable to parse BSM token type at offset: 0x{0:08x} with error: '
                '{1!s}.').format(record_start_offset, exception))
            return False

        if token_type not in self._BSM_HEADER_TOKEN_TYPES:
            parser_mediator.ProduceExtractionError(
                'unsupported token type: {0:d} at offset: 0x{1:08x}.'.format(
                    token_type, record_start_offset))
            # TODO: if it is a MacOS, search for the trailer magic value
            #       as a end of the entry can be a possibility to continue.
            return False

        # NOTE(review): assumes every header token type is also present in
        # _BSM_TOKEN_TYPES; otherwise record_structure is None and
        # parse_stream below raises AttributeError — confirm the tables agree.
        _, record_structure = self._BSM_TOKEN_TYPES.get(token_type, ('', None))

        try:
            token = record_structure.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                ('unable to parse BSM record at offset: 0x{0:08x} with error: '
                 '{1!s}.').format(record_start_offset, exception))
            return False

        # Human readable event type, e.g. 'OpenSSH login (32800)'.
        event_type = bsmtoken.BSM_AUDIT_EVENT.get(token.bsm_header.event_type,
                                                  'UNKNOWN')
        event_type = '{0:s} ({1:d})'.format(event_type,
                                            token.bsm_header.event_type)

        timestamp = (token.timestamp * 1000000) + token.microseconds
        date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
            timestamp=timestamp)

        record_length = token.bsm_header.length
        record_end_offset = record_start_offset + record_length

        # A dict of tokens that has the entry.
        extra_tokens = {}

        # Read until we reach the end of the record.
        while file_object.tell() < record_end_offset:
            # Check if it is a known token.
            try:
                token_type = self._BSM_TOKEN.parse_stream(file_object)
            except (IOError, construct.FieldError):
                logger.warning(
                    'Unable to parse the Token ID at position: {0:d}'.format(
                        file_object.tell()))
                return False

            _, record_structure = self._BSM_TOKEN_TYPES.get(
                token_type, ('', None))

            if not record_structure:
                # Unknown token type: try the untested-structure fallback on
                # the remaining bytes of this record.
                pending = record_end_offset - file_object.tell()
                new_extra_tokens = self.TryWithUntestedStructures(
                    file_object, token_type, pending)
                extra_tokens.update(new_extra_tokens)
            else:
                token = record_structure.parse_stream(file_object)
                new_extra_tokens = self.FormatToken(token_type, token,
                                                    file_object)
                extra_tokens.update(new_extra_tokens)

        if file_object.tell() > record_end_offset:
            # Parsing overshot the declared record length; realign to the
            # start of the next entry.
            logger.warning('Token ID {0:d} not expected at position 0x{1:08x}.'
                           'Jumping for the next entry.'.format(
                               token_type, file_object.tell()))
            try:
                file_object.seek(record_end_offset - file_object.tell(),
                                 os.SEEK_CUR)
            except (IOError, construct.FieldError) as exception:
                logger.warning(
                    'Unable to jump to next entry with error: {0!s}'.format(
                        exception))
                return False

        event_data = BSMEventData()
        if parser_mediator.operating_system == definitions.OPERATING_SYSTEM_MACOS:
            # BSM can be in more than one OS: BSD, Solaris and MacOS.
            # In MacOS the last two tokens are the return status and the trailer.
            return_value = extra_tokens.get('BSM_TOKEN_RETURN32')
            if not return_value:
                return_value = extra_tokens.get('BSM_TOKEN_RETURN64')
            if not return_value:
                return_value = 'UNKNOWN'

            event_data.return_value = return_value

        event_data.event_type = event_type
        event_data.extra_tokens = extra_tokens
        event_data.offset = record_start_offset
        event_data.record_length = record_length

        # TODO: check why trailer was passed to event in original while
        # event was expecting record length.
        # if extra_tokens:
        #   trailer = extra_tokens.get('BSM_TOKEN_TRAILER', 'unknown')

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        return True
Exemple #44
0
  def _GetRecordValues(
      self, parser_mediator, table_name, record, value_mappings=None):
    """Retrieves the values from the record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table.
      record (pyesedb.record): ESE record.
      value_mappings (Optional[dict[str, str]): value mappings, which map
          the column name to a callback method.

    Returns:
      dict[str,object]: values per column name.
    """
    record_values = {}

    for value_entry in range(0, record.number_of_values):
      if parser_mediator.abort:
        break

      column_name = record.get_column_name(value_entry)
      if column_name in record_values:
        logger.warning(
            '[{0:s}] duplicate column: {1:s} in table: {2:s}'.format(
                self.NAME, column_name, table_name))
        continue

      value_callback = None
      if value_mappings and column_name in value_mappings:
        value_callback_method = value_mappings.get(column_name)
        if value_callback_method:
          value_callback = getattr(self, value_callback_method, None)
          if value_callback is None:
            logger.warning((
                '[{0:s}] missing value callback method: {1:s} for column: '
                '{2:s} in table: {3:s}').format(
                    self.NAME, value_callback_method, column_name, table_name))

      if value_callback:
        try:
          value_data = record.get_value_data(value_entry)
          value = value_callback(value_data)

        except Exception as exception:  # pylint: disable=broad-except
          logger.error(exception)
          value = None
          parser_mediator.ProduceExtractionError((
              'unable to parse value: {0:s} with callback: {1:s} with error: '
              '{2!s}').format(column_name, value_callback_method, exception))

      else:
        try:
          value = self._GetRecordValue(record, value_entry)
        except ValueError as exception:
          value = None
          parser_mediator.ProduceExtractionError(
              'unable to parse value: {0:s} with error: {1!s}'.format(
                  column_name, exception))

      record_values[column_name] = value

    return record_values
Exemple #45
0
  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    for subkey in registry_key.GetSubkeys():
      values_dict = {}
      values_dict['subkey_name'] = subkey.name

      name_values = subkey.name.split('&')
      number_of_name_values = len(name_values)

      # Normally we expect 4 fields here however that is not always the case.
      if number_of_name_values != 4:
        logger.warning(
            'Expected 4 &-separated values in: {0:s}'.format(subkey.name))

      # Map the positional &-separated fields to their meaning; missing
      # trailing fields are simply omitted.
      field_names = ('device_type', 'vendor', 'product', 'revision')
      for index, field_name in enumerate(field_names):
        if index < number_of_name_values:
          values_dict[field_name] = name_values[index]

      if subkey.number_of_subkeys == 0:
        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = values_dict
        event_data.source_append = self._SOURCE_APPEND

        # Time last USB device of this class was first inserted.
        event = time_events.DateTimeValuesEvent(
            subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
        continue

      for device_key in subkey.GetSubkeys():
        # Build a per-device snapshot of the values and a fresh event data
        # object. The previous implementation reused a single dict (and a
        # single event data object) across device keys, so events already
        # produced for earlier device keys referenced a dict that was
        # mutated by later iterations.
        device_values = dict(values_dict)
        device_values['serial'] = device_key.name

        friendly_name_value = device_key.GetValueByName('FriendlyName')
        if friendly_name_value:
          device_values['friendly_name'] = (
              friendly_name_value.GetDataAsObject())

        # ParentIdPrefix applies to Windows XP Only.
        parent_id_prefix_value = device_key.GetValueByName('ParentIdPrefix')
        if parent_id_prefix_value:
          device_values['parent_id_prefix'] = (
              parent_id_prefix_value.GetDataAsObject())

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = device_values
        event_data.source_append = self._SOURCE_APPEND

        # Time last USB device of this class was first inserted.
        event = time_events.DateTimeValuesEvent(
            subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # Win7 - Last Connection.
        # Vista/XP - Time of an insert.
        event = time_events.DateTimeValuesEvent(
            device_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # Produce an event for the last written time of each well-known
        # per-device subkey, when present.
        for device_subkey_name in ('Device Parameters', 'LogConf',
                                   'Properties'):
          device_subkey = device_key.GetSubkeyByName(device_subkey_name)
          if device_subkey:
            event = time_events.DateTimeValuesEvent(
                device_subkey.last_written_time,
                definitions.TIME_DESCRIPTION_WRITTEN)
            parser_mediator.ProduceEventWithEventData(event, event_data)