Example #1
    def ParseStatusRow(self,
                       parser_mediator,
                       row,
                       query=None,
                       **unused_kwargs):
        """Parses a contact row from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row resulting from query.
      query (Optional[str]): query.
    """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        event_data = TwitterIOSStatusEventData()
        event_data.favorite_count = row['favoriteCount']
        event_data.favorited = row['favorited']
        event_data.name = row['name']
        event_data.query = query
        event_data.retweet_count = row['retweetCount']
        event_data.text = row['text']
        event_data.user_id = row['user_id']

        timestamp = row['date']
        if timestamp:
            # Convert the floating point value to an integer.
            timestamp = int(timestamp)
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = row['updatedAt']
        if timestamp:
            # Convert the floating point value to an integer.
            timestamp = int(timestamp)
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_UPDATE)
            parser_mediator.ProduceEventWithEventData(event, event_data)
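
The conversion in the middle of this pattern is the part that generalizes: SQLite hands back a floating point POSIX value and PosixTime stores whole seconds. A minimal standalone sketch, assuming only the dfdatetime package is installed and using a plain dictionary in place of an sqlite3.Row:

    from dfdatetime import posix_time as dfdatetime_posix_time

    # Hypothetical row value; an sqlite3.Row would be indexed the same way.
    row = {'date': 1457771210.25}

    timestamp = row['date']
    if timestamp:
        # Convert the floating point value to an integer, as the parser above does.
        date_time = dfdatetime_posix_time.PosixTime(timestamp=int(timestamp))
        print(date_time.CopyToDateTimeString())  # e.g. 2016-03-12 08:26:50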
Example #2
    def testGetMessages(self):
        """Tests the GetMessages method."""
        date_time = dfdatetime_posix_time.PosixTime(timestamp=1457771210)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
        event.data_type = 'bash:history:command'
        event.command = 'cd plaso'

        expected_messages = ('Command executed: cd plaso', 'cd plaso')
        messages = self._formatter.GetMessages(None, event)
        self.assertEqual(messages, expected_messages)
Example #3
  def modification_time(self):
    """dfdatetime.DateTimeValues: modification time or None if not available."""
    if self._stat_info is None:
      return None

    timestamp = getattr(self._stat_info, 'st_mtime_ns', None)
    if timestamp is not None:
      return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)

    timestamp = int(self._stat_info.st_mtime)
    return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
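
A rough standalone sketch of the same fallback logic (not plaso code; the helper name GetModificationTime is made up for illustration), assuming the dfdatetime package is installed:

    import os

    from dfdatetime import posix_time as dfdatetime_posix_time


    def GetModificationTime(path):
        """Returns the modification time of a file as a dfdatetime object."""
        stat_info = os.stat(path)

        # Prefer nanosecond precision when os.stat() exposes st_mtime_ns.
        timestamp = getattr(stat_info, 'st_mtime_ns', None)
        if timestamp is not None:
            return dfdatetime_posix_time.PosixTimeInNanoseconds(
                timestamp=timestamp)

        # Fall back to whole seconds.
        return dfdatetime_posix_time.PosixTime(
            timestamp=int(stat_info.st_mtime))


    print(GetModificationTime(__file__).CopyToDateTimeString())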
Example #4
  def testGetAttributeNames(self):
    """Tests the GetAttributeNames function."""
    posix_time = dfdatetime_posix_time.PosixTime(timestamp=0)
    attribute_container = time_events.DateTimeValuesEvent(posix_time, 'usage')

    expected_attribute_names = [
        '_event_data_row_identifier', 'parser', 'timestamp', 'timestamp_desc']

    attribute_names = sorted(attribute_container.GetAttributeNames())

    self.assertEqual(attribute_names, expected_attribute_names)
Example #5
  def ParseGenericRow(
      self, parser_mediator, query, row, **unused_kwargs):
    """Parses a generic windows timeline row.

      Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row.
    """
    query_hash = hash(query)

    event_data = WindowsTimelineGenericEventData()

    # Payload is JSON serialized as binary data in a BLOB field, with the text
    # encoded as UTF-8.
    payload_json_bytes = bytes(self._GetRowValue(query_hash, row, 'Payload'))
    payload_json_string = payload_json_bytes.decode('utf-8')
    # AppId is JSON stored as unicode text.
    appid_entries_string = self._GetRowValue(query_hash, row, 'AppId')

    payload = json.loads(payload_json_string)
    appid_entries = json.loads(appid_entries_string)

    # Attempt to populate the package_identifier field by checking each of
    # these fields in the AppId JSON.
    package_id_locations = [
        'packageId', 'x_exe_path', 'windows_win32', 'windows_universal',
        'alternateId']
    for location in package_id_locations:
      for entry in appid_entries:
        if entry['platform'] == location and entry['application'] != '':
          event_data.package_identifier = entry['application']
          break
      if event_data.package_identifier is not None:
        # package_identifier has been populated and we're done.
        break

    if 'description' in payload:
      event_data.description = payload['description']
    else:
      event_data.description = ''

    if 'appDisplayName' in payload and payload['appDisplayName'] != '':
      event_data.application_display_name = payload['appDisplayName']
    elif 'displayText' in payload and payload['displayText'] != '':
      # Fall back to displayText if appDisplayName isn't available
      event_data.application_display_name = payload['displayText']

    timestamp = self._GetRowValue(query_hash, row, 'StartTime')
    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_START)
    parser_mediator.ProduceEventWithEventData(event, event_data)
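
Stripped of the plaso plumbing, the AppId lookup above is a priority search over a small JSON document. A minimal sketch with a made-up appid_entries value of the shape the parser expects:

    import json

    # Hypothetical AppId JSON as stored in the Windows Timeline database.
    appid_entries = json.loads(
        '[{"platform": "packageId", "application": ""},'
        ' {"platform": "windows_win32", "application": "notepad.exe"}]')

    package_id_locations = [
        'packageId', 'x_exe_path', 'windows_win32', 'windows_universal',
        'alternateId']

    package_identifier = None
    for location in package_id_locations:
        for entry in appid_entries:
            if entry['platform'] == location and entry['application'] != '':
                package_identifier = entry['application']
                break
        if package_identifier is not None:
            # Populated by the highest priority platform available; stop.
            break

    print(package_identifier)  # notepad.exe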
Example #6
    def Process(self, parser_mediator, decoded_values=None, **kwargs):
        """Extracts events from Transmission's resume folder files.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      decoded_values (Optional[collections.OrderedDict[bytes|str, object]]):
          decoded values.
    """
        # This will raise if unhandled keyword arguments are passed.
        super(TransmissionPlugin, self).Process(parser_mediator, **kwargs)

        destination = self._GetDecodedValue(decoded_values, 'destination')
        seeding_time = self._GetDecodedValue(decoded_values,
                                             'seeding-time-seconds')

        event_data = TransmissionEventData()
        event_data.destination = destination
        # Convert seconds to minutes.
        event_data.seedtime, _ = divmod(seeding_time, 60)

        timestamp = self._GetDecodedValue(decoded_values, 'added-date')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ADDED)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetDecodedValue(decoded_values, 'done-date')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetDecodedValue(decoded_values, 'activity-date')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #7
  def testGetMessages(self):
    """Tests the GetMessages method."""
    date_time = dfdatetime_posix_time.PosixTime(timestamp=1457771210)
    event = time_events.DateTimeValuesEvent(
        date_time, eventdata.EventTimestamp.MODIFICATION_TIME)
    event.command = u'cd plaso'
    event.data_type = u'shell:zsh:history'
    event.elapsed_seconds = 0

    expected_messages = (u'cd plaso Time elapsed: 0 seconds', u'cd plaso')
    messages = self._formatter.GetMessages(None, event)
    self.assertEqual(messages, expected_messages)
Example #8
  def testGetAttributeNames(self):
    """Tests the GetAttributeNames function."""
    posix_time = dfdatetime_posix_time.PosixTime(timestamp=0)
    attribute_container = time_events.DateTimeValuesEvent(posix_time, u'usage')

    expected_attribute_names = [
        u'data_type', u'display_name', u'filename', u'hostname', u'inode',
        u'offset', u'pathspec', u'tag', u'timestamp', u'timestamp_desc']

    attribute_names = sorted(attribute_container.GetAttributeNames())

    self.assertEqual(attribute_names, expected_attribute_names)
Example #9
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses an Windows FileHistory Restore.log file-like object.

        Args:
            parser_mediator (ParseMediator): mediates interactions between parsers
                and other components, such as storage and dfvfs.
            key (str):
            structure (pyparsing.ParseResults):

        Raises:
            ParseError: when the structure type is unknown.
        """
        event_data = FileHistoryRestoreLogEventData()
        encoding = self._ENCODING or parser_mediator.codepage

        text_file_object = text_parser.text_file.TextFile(file_object,
                                                          encoding=encoding)
        line = ''
        try:
            line = text_file_object.readline(400)

        except UnicodeDecodeError:
            raise errors.UnableToParseFile(
                'Not a text file or encoding not supported')

        if not line:
            raise errors.UnableToParseFile('Not a text file.')

        if not line.startswith("<"):
            raise errors.UnableToParseFile(
                'Not a Windows FileHistory Restore.log file.')

        split_line = line.split(' ')
        len_split_line = len(split_line)
        if len_split_line != 8:
            raise errors.UnableToParseFile(
                'Not a Windows FileHistory Restore.log file.')

        event_data.file_record_id = int(split_line[2].replace("\x00", ""), 16)
        event_data.restored_file = split_line[3].replace("\x00", "")
        event_data.usn = int(split_line[4].replace("\x00", ""), 16)
        temp_creation_date = int(split_line[6].replace("\x00", ""), 16)
        event_data.creation_date = dfdatetime_filetime.Filetime(
            timestamp=temp_creation_date).CopyToDateTimeString()
        temp_modification_date = int(
            split_line[7].replace("\x00", "")[:-2], 16)
        event_data.modification_date = dfdatetime_filetime.Filetime(
            timestamp=temp_modification_date).CopyToDateTimeString()

        date_time = dfdatetime_posix_time.PosixTime(timestamp=0)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example #10
    def ParseRow(self, parser_mediator, row_offset, row):
        """Parses a line of the log file and produces events.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row_offset (int): number of the corresponding line.
      row (dict[str, str]): fields of a single row, as specified in COLUMNS.
    """
        filename = row.get('name', None)
        md5_hash = row.get('md5', None)
        mode = row.get('mode_as_string', None)

        inode_number = row.get('inode', None)
        if '-' in inode_number:
            inode_number, _, _ = inode_number.partition('-')

        try:
            inode_number = int(inode_number, 10)
        except (TypeError, ValueError):
            inode_number = None

        data_size = self._GetIntegerValue(row, 'size')
        user_uid = self._GetIntegerValue(row, 'uid')
        user_gid = self._GetIntegerValue(row, 'gid')

        event_data = MactimeEventData()
        event_data.filename = filename
        event_data.inode = inode_number
        event_data.md5 = md5_hash
        event_data.mode_as_string = mode
        event_data.offset = row_offset
        event_data.size = data_size
        event_data.user_gid = user_gid

        if user_uid is None:
            event_data.user_sid = None
        else:
            # Note that the user_sid value is expected to be a string.
            event_data.user_sid = '{0:d}'.format(user_uid)

        for value_name, timestamp_description in iter(
                self._TIMESTAMP_DESC_MAP.items()):
            posix_time = self._GetIntegerValue(row, value_name)
            # mactime will return 0 if the timestamp is not set.
            if not posix_time:
                continue

            date_time = dfdatetime_posix_time.PosixTime(timestamp=posix_time)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    timestamp_description)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #11
  def ParseStatusRow(self, parser_mediator, query, row, **unused_kwargs):
    """Parses a contact row from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row resulting from query.
    """
    query_hash = hash(query)

    event_data = TwitterIOSStatusEventData()
    event_data.favorite_count = self._GetRowValue(
        query_hash, row, 'favoriteCount')
    event_data.favorited = self._GetRowValue(query_hash, row, 'favorited')
    event_data.name = self._GetRowValue(query_hash, row, 'name')
    event_data.query = query
    event_data.retweet_count = self._GetRowValue(
        query_hash, row, 'retweetCount')
    event_data.text = self._GetRowValue(query_hash, row, 'text')
    event_data.user_id = self._GetRowValue(query_hash, row, 'user_id')

    timestamp = self._GetRowValue(query_hash, row, 'date')
    if timestamp:
      # Convert the floating point value to an integer.
      timestamp = int(timestamp)
      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    timestamp = self._GetRowValue(query_hash, row, 'updatedAt')
    if timestamp:
      # Convert the floating point value to an integer.
      timestamp = int(timestamp)
      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_UPDATE)
      parser_mediator.ProduceEventWithEventData(event, event_data)
Example #12
    def testGetMessages(self):
        """Tests the GetMessages method."""
        date_time = dfdatetime_posix_time.PosixTime()
        date_time.CopyFromString(u'2016-08-09 04:57:14')

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
        event.data_type = u'dpkg:line'
        event.body = u'status half-installed base-passwd:amd64 3.5.33'

        expected_messages = (u'status half-installed base-passwd:amd64 3.5.33',
                             u'status half-installed base-passwd:amd64 3.5.33')
        messages = self._formatter.GetMessages(None, event)
        self.assertEqual(messages, expected_messages)
Example #13
  def ParseCloudEntryRow(
      self, parser_mediator, row, cache=None, database=None, query=None,
      **unused_kwargs):
    """Parses a cloud entry row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row.
      cache (SQLiteCache): cache.
      database (SQLiteDatabase): database.
      query (Optional[str]): query.
    """
    # Note that pysqlite does not accept a Unicode string in row['string'] and
    # will raise "IndexError: Index must be int or string".

    cloud_path = self.GetCloudPath(row['parent_resource_id'], cache, database)
    cloud_filename = u'{0:s}{1:s}'.format(cloud_path, row['filename'])

    event_data = GoogleDriveSnapshotCloudEntryEventData()
    event_data.document_type = row['doc_type']
    event_data.path = cloud_filename
    event_data.query = query
    event_data.shared = bool(row['shared'])
    event_data.size = row['size']
    event_data.url = row['url']

    date_time = dfdatetime_posix_time.PosixTime(timestamp=row['modified'])
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    if row['created']:
      date_time = dfdatetime_posix_time.PosixTime(timestamp=row['created'])
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)
Example #14
    def _ParseLogLine(self, parser_mediator, structure):
        """Parses an event object from the log line.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      structure (pyparsing.ParseResults): structure parsed from the log file.
    """
        # Required fields are <mru> and <atime> and we are not interested in
        # log lines without <mru>.
        if not structure.mru:
            return

        event_data = PopularityContestEventData()
        event_data.mru = structure.mru
        event_data.package = structure.package
        event_data.record_tag = structure.tag

        # The <atime> field (as <ctime>) is always present but could be 0.
        # In case of <atime> equal to 0, we are in <NOFILES> case, safely return
        # without logging.
        if structure.atime:
            # TODO: not doing any check on <tag> fields, even if only informative
            # probably it could be better to check for the expected values.
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=structure.atime)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if structure.ctime:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=structure.ctime)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #15
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        installation_time = None
        registry_value = registry_key.GetValueByName('InstallDate')
        if registry_value:
            installation_time = registry_value.GetDataAsObject()

            # TODO: if not present indicate anomaly of missing installation
            # date and time.
            if installation_time is not None:
                event_data = WindowsRegistryInstallationEventData()
                event_data.key_path = registry_key.path

                registry_value = registry_key.GetValueByName(
                    'CurrentBuildNumber')
                if registry_value:
                    event_data.build_number = registry_value.GetDataAsObject()

                registry_value = registry_key.GetValueByName('RegisteredOwner')
                if registry_value:
                    event_data.owner = registry_value.GetDataAsObject()

                registry_value = registry_key.GetValueByName('ProductName')
                if registry_value:
                    event_data.product_name = registry_value.GetDataAsObject()

                registry_value = registry_key.GetValueByName('CSDVersion')
                if registry_value:
                    event_data.service_pack = registry_value.GetDataAsObject()

                registry_value = registry_key.GetValueByName('CurrentVersion')
                if registry_value:
                    event_data.version = registry_value.GetDataAsObject()

                date_time = dfdatetime_posix_time.PosixTime(
                    timestamp=installation_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_INSTALLATION)
                parser_mediator.ProduceEventWithEventData(event, event_data)

        self._ProduceDefaultWindowsRegistryEvent(parser_mediator,
                                                 registry_key,
                                                 names_to_skip=['InstallDate'])
Example #16
    def ParseUsersstatusesRow(self,
                              parser_mediator,
                              row,
                              query=None,
                              **unused_kwargs):
        """Parses a contact row from the database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row resulting from query.
      query (Optional[str]): query.
    """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        event_data = TestEventData()
        event_data.user_id = row['user_id']

        timestamp = row['createdDate']
        if timestamp:
            # Convert the floating point value to an integer.
            timestamp = int(timestamp)
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            # TODO: Add correct time field for None value.  Example: eventdata.EventTimestamp.UPDATE_TIME
            event = time_events.DateTimeValuesEvent(date_time, None)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = row['updatedAt']
        if timestamp:
            # Convert the floating point value to an integer.
            timestamp = int(timestamp)
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            # TODO: Add correct time field for None value.  Example: eventdata.EventTimestamp.UPDATE_TIME
            event = time_events.DateTimeValuesEvent(date_time, None)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #17
    def ParseRecord(self, parser_mediator, key, structure):
        """Parses a log record structure and produces events.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): name of the parsed structure.
      structure (pyparsing.ParseResults): structure parsed from the log file.

    Raises:
      ParseError: when the structure type is unknown.
    """
        if key not in self._SUPPORTED_KEYS:
            raise errors.ParseError(
                'Unable to parse record, unknown structure: {0:s}'.format(key))

        # TODO: Add anomaly objects for abnormal timestamps, such as when the log
        # timestamp is greater than the session start.
        if key == 'logline':
            self._ParseLogLine(parser_mediator, structure)

        else:
            timestamp = self._GetValueFromStructure(structure, 'timestamp')
            if timestamp is None:
                logger.debug('[{0:s}] {1:s} with invalid timestamp.'.format(
                    self.NAME, key))
                return

            session = self._GetValueFromStructure(structure, 'session')

            event_data = PopularityContestSessionEventData()
            # TODO: determine why session is formatted as a string.
            event_data.session = '{0!s}'.format(session)

            if key == 'header':
                event_data.details = self._GetValueFromStructure(
                    structure, 'details')
                event_data.hostid = self._GetValueFromStructure(
                    structure, 'id')
                event_data.status = 'start'

            elif key == 'footer':
                event_data.status = 'end'

            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ADDED)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #18
  def testCopyToPosixTimestampWithFractionOfSecond(self):
    """Tests the CopyToPosixTimestampWithFractionOfSecond function."""
    posix_time_object = posix_time.PosixTimeInMicroseconds(
        timestamp=1281643591546875)

    posix_timestamp, fraction_of_second = (
        posix_time_object.CopyToPosixTimestampWithFractionOfSecond())
    self.assertEqual(posix_timestamp, 1281643591)
    self.assertEqual(fraction_of_second, 546875)

    posix_time_object = posix_time.PosixTime()

    posix_timestamp, fraction_of_second = (
        posix_time_object.CopyToPosixTimestampWithFractionOfSecond())
    self.assertIsNone(posix_timestamp)
    self.assertIsNone(fraction_of_second)
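
For reference, the same call outside the test harness; the pair returned is the whole seconds and the remaining microsecond fraction (a small sketch assuming the dfdatetime package is installed):

    from dfdatetime import posix_time

    posix_time_object = posix_time.PosixTimeInMicroseconds(
        timestamp=1281643591546875)

    seconds, fraction = (
        posix_time_object.CopyToPosixTimestampWithFractionOfSecond())
    print(seconds, fraction)  # 1281643591 546875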
Example #19
  def change_time(self):
    """dfdatetime.DateTimeValues: change time or None if not available."""
    if self._stat_info is None:
      return None

    # Per Python os.stat() documentation the value of stat_results.st_ctime
    # contains the creation time on Windows.
    if self._OS_IS_WINDOWS:
      return None

    timestamp = getattr(self._stat_info, 'st_ctime_ns', None)
    if timestamp is not None:
      return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)

    timestamp = int(self._stat_info.st_ctime)
    return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
Example #20
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a Portable Executable (PE) file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfVFS.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        pe_data_slice = dfvfs_data_slice.DataSlice(file_object)
        try:
            pefile_object = pefile.PE(data=pe_data_slice, fast_load=True)
            pefile_object.parse_data_directories(
                directories=self._PE_DIRECTORIES)
        except Exception as exception:
            raise errors.UnableToParseFile(
                'Unable to read PE file with error: {0!s}'.format(exception))

        event_data = PEEventData()
        # Note that the result of get_imphash() is an empty string if there is no
        # import hash.
        event_data.imphash = pefile_object.get_imphash() or None
        event_data.pe_type = self._GetPEType(pefile_object)
        event_data.section_names = self._GetSectionNames(pefile_object)

        timestamp = getattr(pefile_object.FILE_HEADER, 'TimeDateStamp', None)
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
        else:
            date_time = dfdatetime_semantic_time.NotSet()

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        self._ParseExportTable(parser_mediator, pefile_object, event_data)

        self._ParseImportTable(parser_mediator, pefile_object, event_data)

        self._ParseLoadConfigurationTable(parser_mediator, pefile_object,
                                          event_data)

        self._ParseDelayImportTable(parser_mediator, pefile_object, event_data)

        self._ParseResourceSection(parser_mediator, pefile_object, event_data)
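
Outside of plaso, the interesting step here is reading the PE compile time: pefile exposes it as FILE_HEADER.TimeDateStamp, which is a POSIX timestamp. A small sketch assuming the pefile and dfdatetime packages are installed and that example.exe is any PE binary (hypothetical path):

    import pefile

    from dfdatetime import posix_time as dfdatetime_posix_time

    with open('example.exe', 'rb') as file_object:
        pefile_object = pefile.PE(data=file_object.read(), fast_load=True)

    timestamp = getattr(pefile_object.FILE_HEADER, 'TimeDateStamp', None)
    if timestamp:
        date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
        print(date_time.CopyToDateTimeString())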
Example #21
    def _ParseInstallDate(self, registry_value):
        """Parses the InstallDate value.

    Args:
      registry_value (dfwinreg.WinRegistryValue): Windows Registry value.

    Returns:
      dfdatetime.DateTimeValues: installation date and time or None if not available.
    """
        if not registry_value:
            return None

        timestamp = registry_value.GetDataAsObject()
        if not timestamp:
            return dfdatetime_semantic_time.SemanticTime(string='Not set')

        return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
Example #22
    def _FormatIntegerAsPosixTime(self, integer):
        """Formats an integer as a POSIX date and time value.

    Args:
      integer (int): integer.

    Returns:
      str: integer formatted as a POSIX date and time value.
    """
        if integer == 0:
            return 'Not set (0)'

        date_time = dfdatetime_posix_time.PosixTime(timestamp=integer)
        date_time_string = date_time.CopyToDateTimeString()
        if not date_time_string:
            return '0x{0:08x}'.format(integer)

        return '{0:s} UTC'.format(date_time_string)
Example #23
    def _DebugPrintPosixTimeValue(self, description, value):
        """Prints a POSIX timestamp value for debugging.

    Args:
      description (str): description.
      value (object): value.
    """
        if value == 0:
            date_time_string = 'Not set (0)'
        else:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=value)
            date_time_string = date_time.CopyToDateTimeString()
            if date_time_string:
                date_time_string = '{0:s} UTC'.format(date_time_string)
            else:
                date_time_string = '0x{0:08x}'.format(value)

        self._DebugPrintValue(description, date_time_string)
Example #24
    def ParseFileObject(self, parser_mediator, file_object):
        """Parses a GPT file-like object.

        Args:
            parser_mediator (ParserMediator): mediates interactions between parsers
                and other components, such as storage and dfvfs.
            file_object (dfvfs.FileIO): a file-like object.

        Raises:
            UnableToParseFile: when the file is not a GPT partition entry array.
        """
        event_data = GPTEventData()

        try:
            read_file = file_object.read()
        except IOError as exception:
            raise errors.UnableToParseFile(
                'Unable to read file with error: {0!s}'.format(exception))

        file_size = len(read_file)
        if file_size < 128 or (file_size % 128) != 0:
            raise errors.UnableToParseFile('Not a GPT file')

        entry_signature = self._convert_guid(bytearray(read_file[0:16]))
        if not self._check_entry(entry_signature):
            raise errors.UnableToParseFile('Not a GPT file')

        entry_data = bytearray(read_file)
        number_of_entries = file_size // 128
        for index in range(number_of_entries):
            current_entry = entry_data[index * 128:(index + 1) * 128]
            current_entry_guid = self._convert_guid(current_entry)
            if not self._check_entry(current_entry_guid):
                continue

            event_data.drive_signature = '{{{0:s}}}'.format(
                self._convert_guid(current_entry[16:32]))
            date_time = dfdatetime_posix_time.PosixTime(timestamp=0)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #25
    def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
        """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
        values_dict = self._GetValuesFromKey(registry_key)
        installation_time = None
        for name, value in dict(values_dict).items():
            if name.lower() == 'installdate':
                installation_time = value
                del values_dict[name]

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.values = ' '.join([
            '{0:s}: {1!s}'.format(name, value)
            for name, value in sorted(values_dict.items())
        ]) or None

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        # TODO: if not present indicate anomaly of missing installation
        # date and time.
        if installation_time is not None:
            event_data = WindowsRegistryInstallationEventData()
            event_data.build_number = values_dict.get('CurrentBuildNumber',
                                                      None)
            event_data.key_path = registry_key.path
            event_data.owner = values_dict.get('RegisteredOwner', None)
            event_data.product_name = values_dict.get('ProductName', None)
            event_data.service_pack = values_dict.get('CSDVersion', None)
            event_data.version = values_dict.get('CurrentVersion', None)

            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=installation_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_INSTALLATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #26
    def ParseChat(self, parser_mediator, query, row, **unused_kwargs):
        """Parses a chat message.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row resulting from query.
    """
        query_hash = hash(query)

        participants = self._GetRowValue(query_hash, row, 'participants')
        author = self._GetRowValue(query_hash, row, 'author')
        dialog_partner = self._GetRowValue(query_hash, row, 'dialog_partner')
        from_displayname = self._GetRowValue(query_hash, row,
                                             'from_displayname')

        accounts = []
        participants = participants.split(' ')
        for participant in participants:
            if participant != author:
                accounts.append(participant)

        to_account = ', '.join(accounts)
        if not to_account:
            to_account = dialog_partner or 'Unknown User'

        from_account = '{0:s} <{1:s}>'.format(from_displayname, author)

        event_data = SkypeChatEventData()
        event_data.from_account = from_account
        event_data.query = query
        event_data.text = self._GetRowValue(query_hash, row, 'body_xml')
        event_data.title = self._GetRowValue(query_hash, row, 'title')
        event_data.to_account = to_account

        timestamp = self._GetRowValue(query_hash, row, 'timestamp')
        if timestamp:
            date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    'Chat from Skype')
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #27
    def _ParseCookieData(self,
                         parser_mediator,
                         cookie_data=None,
                         url=None,
                         **kwargs):
        """Extracts events from cookie data.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      cookie_data (bytes): cookie data.
      url (str): URL or path where the cookie got set.
    """
        fields = cookie_data.split('.')
        number_of_fields = len(fields)

        if number_of_fields != 1:
            parser_mediator.ProduceExtractionWarning(
                'unsupported number of fields: {0:d} in cookie: {1:s}'.format(
                    number_of_fields, self.COOKIE_NAME))
            return

        try:
            # TODO: fix that we're losing precision here use dfdatetime.
            last_visit_posix_time = int(fields[0], 10) / 10000000
        except ValueError:
            last_visit_posix_time = None

        if last_visit_posix_time is not None:
            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=last_visit_posix_time)
            timestamp_description = definitions.TIME_DESCRIPTION_LAST_VISITED
        else:
            date_time = dfdatetime_semantic_time.NotSet()
            timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

        event_data = GoogleAnalyticsEventData('utmt')
        event_data.cookie_name = self.COOKIE_NAME
        event_data.url = url

        event = time_events.DateTimeValuesEvent(date_time,
                                                timestamp_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example #28
    def ParseRecord(self, parser_mediator, key, structure):
        """Parses a log record structure and produces events.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): name of the parsed structure.
      structure (pyparsing.ParseResults): structure parsed from the log file.
    """
        if key not in (u'footer', u'header', u'logline'):
            logging.warning(
                u'PopularityContestParser, unknown structure: {0:s}.'.format(
                    key))
            return

        # TODO: Add anomaly objects for abnormal timestamps, such as when the log
        # timestamp is greater than the session start.
        if key == u'logline':
            self._ParseLogLine(parser_mediator, structure)

        else:
            if not structure.timestamp:
                logging.debug(u'[{0:s}] {1:s} with invalid timestamp.'.format(
                    self.NAME, key))
                return

            event_data = PopularityContestSessionEventData()
            event_data.session = u'{0!s}'.format(structure.session)

            if key == u'header':
                event_data.details = structure.details
                event_data.hostid = structure.id
                event_data.status = u'start'

            elif key == u'footer':
                event_data.status = u'end'

            date_time = dfdatetime_posix_time.PosixTime(
                timestamp=structure.timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ADDED)
            parser_mediator.ProduceEventWithEventData(event, event_data)
Example #29
    def DocumentVersionsRow(self,
                            parser_mediator,
                            row,
                            query=None,
                            **unused_kwargs):
        """Parses a document versions row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row.
      query (Optional[str]): query.
    """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        # version_path = "PerUser/UserID/xx/client_id/version_file"
        # where PerUser and UserID are real directories.
        paths = row['version_path'].split(u'/')
        if len(paths) < 2 or not paths[1].isdigit():
            user_sid = u''
        else:
            user_sid = paths[1]
        version_path = self.ROOT_VERSION_PATH + row['version_path']
        path, _, _ = row['path'].rpartition(u'/')

        event_data = MacDocumentVersionsEventData()
        # TODO: shouldn't this be a separate event?
        event_data.last_time = row['last_time']
        event_data.name = row['name']
        event_data.path = path
        event_data.query = query
        # Note that the user_sid value is expected to be a string.
        event_data.user_sid = u'{0!s}'.format(user_sid)
        event_data.version_path = version_path

        date_time = dfdatetime_posix_time.PosixTime(
            timestamp=row['version_time'])
        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.CREATION_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example #30
    def ParseRecord(self, parser_mediator, key, structure):
        """Parses a log record structure.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): name of the parsed structure.
      structure (pyparsing.ParseResults): structure parsed from the log file.
    """
        if key != 'logline':
            logger.warning(
                'Unable to parse record, unknown structure: {0:s}'.format(key))
            return

        timestamp = self._GetValueFromStructure(structure, 'timestamp')
        try:
            timestamp = int(timestamp, 10)
        except (TypeError, ValueError):
            logger.debug(
                'Invalid timestamp {0!s}, skipping record'.format(timestamp))
            return

        try:
            text = self._GetValueFromStructure(structure,
                                               'text',
                                               default_value='')
            nickname, text = self._StripThenGetNicknameAndText(text)
        except pyparsing.ParseException:
            logger.debug('Error parsing entry at offset {0:d}'.format(
                self._offset))
            return

        event_data = XChatScrollbackEventData()
        event_data.nickname = nickname
        event_data.offset = self._offset
        event_data.text = text

        date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_ADDED)
        parser_mediator.ProduceEventWithEventData(event, event_data)