    def testGetNormalizedTimestamp(self):
        """Tests the _GetNormalizedTimestamp function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        normalized_timestamp = webkit_time_object._GetNormalizedTimestamp()
        self.assertEqual(normalized_timestamp,
                         decimal.Decimal('1281647191.546875'))

        webkit_time_object = webkit_time.WebKitTime(
            time_zone_offset=60, timestamp=12926120791546875)

        normalized_timestamp = webkit_time_object._GetNormalizedTimestamp()
        self.assertEqual(normalized_timestamp,
                         decimal.Decimal('1281643591.546875'))

        webkit_time_object = webkit_time.WebKitTime(
            timestamp=0x1ffffffffffffffff)

        normalized_timestamp = webkit_time_object._GetNormalizedTimestamp()
        self.assertIsNone(normalized_timestamp)

        webkit_time_object = webkit_time.WebKitTime()

        normalized_timestamp = webkit_time_object._GetNormalizedTimestamp()
        self.assertIsNone(normalized_timestamp)
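
A note on the arithmetic being tested: WebKit timestamps count microseconds since 1601-01-01 00:00:00 UTC, so normalization divides by 1,000,000 and subtracts the 11644473600-second delta between the WebKit and POSIX epochs; a time_zone_offset in minutes is subtracted on top of that. A minimal standalone sketch reproducing the asserted values (the normalize helper is illustrative, not dfdatetime's API):

# Sketch: reproduce the normalized timestamps asserted above.
import decimal

WEBKIT_TO_POSIX_DELTA = 11644473600  # seconds between 1601-01-01 and 1970-01-01

def normalize(webkit_timestamp, time_zone_offset=0):
    """Converts microseconds since 1601-01-01 to a POSIX decimal timestamp."""
    seconds = decimal.Decimal(webkit_timestamp) / 1000000
    return seconds - WEBKIT_TO_POSIX_DELTA - (time_zone_offset * 60)

assert normalize(12926120791546875) == decimal.Decimal('1281647191.546875')
assert normalize(12926120791546875, 60) == decimal.Decimal('1281643591.546875')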
Example #2
  def testProperties(self):
    """Tests the properties."""
    webkit_time_object = webkit_time.WebKitTime(timestamp=12926120791546875)
    self.assertEqual(webkit_time_object.timestamp, 12926120791546875)

    webkit_time_object = webkit_time.WebKitTime()
    self.assertIsNone(webkit_time_object.timestamp)
Example #3
  def testCopyToDateTimeString(self):
    """Tests the CopyToDateTimeString function."""
    webkit_time_object = webkit_time.WebKitTime(timestamp=12926120791546875)

    date_time_string = webkit_time_object.CopyToDateTimeString()
    self.assertEqual(date_time_string, '2010-08-12 21:06:31.546875')

    webkit_time_object = webkit_time.WebKitTime()

    date_time_string = webkit_time_object.CopyToDateTimeString()
    self.assertIsNone(date_time_string)
Example #4
    def testGetDate(self):
        """Tests the GetDate function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        date_tuple = webkit_time_object.GetDate()
        self.assertEqual(date_tuple, (2010, 8, 12))

        webkit_time_object = webkit_time.WebKitTime()

        date_tuple = webkit_time_object.GetDate()
        self.assertEqual(date_tuple, (None, None, None))
Example #5
    def testGetTimeOfDay(self):
        """Tests the GetTimeOfDay function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        time_of_day_tuple = webkit_time_object.GetTimeOfDay()
        self.assertEqual(time_of_day_tuple, (21, 6, 31))

        webkit_time_object = webkit_time.WebKitTime()

        time_of_day_tuple = webkit_time_object.GetTimeOfDay()
        self.assertEqual(time_of_day_tuple, (None, None, None))
Example #6
    def testGetDateWithTimeOfDay(self):
        """Tests the GetDateWithTimeOfDay function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        date_with_time_of_day_tuple = webkit_time_object.GetDateWithTimeOfDay()
        self.assertEqual(date_with_time_of_day_tuple, (2010, 8, 12, 21, 6, 31))

        webkit_time_object = webkit_time.WebKitTime()

        date_with_time_of_day_tuple = webkit_time_object.GetDateWithTimeOfDay()
        self.assertEqual(date_with_time_of_day_tuple,
                         (None, None, None, None, None, None))
Example #7
    def testCopyFromDateTimeString(self):
        """Tests the CopyFromDateTimeString function."""
        webkit_time_object = webkit_time.WebKitTime()

        webkit_time_object.CopyFromDateTimeString('2010-08-12')
        self.assertEqual(webkit_time_object._timestamp, 12926044800000000)
        self.assertEqual(webkit_time_object._time_zone_offset, 0)

        webkit_time_object.CopyFromDateTimeString('2010-08-12 21:06:31')
        self.assertEqual(webkit_time_object._timestamp, 12926120791000000)
        self.assertEqual(webkit_time_object._time_zone_offset, 0)

        webkit_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.546875')
        self.assertEqual(webkit_time_object._timestamp, 12926120791546875)
        self.assertEqual(webkit_time_object._time_zone_offset, 0)

        webkit_time_object.CopyFromDateTimeString(
            '2010-08-12 21:06:31.546875-01:00')
        self.assertEqual(webkit_time_object._timestamp, 12926120791546875)
        self.assertEqual(webkit_time_object._time_zone_offset, -60)

        webkit_time_object.CopyFromDateTimeString(
            '2010-08-12 21:06:31.546875+01:00')
        self.assertEqual(webkit_time_object._timestamp, 12926120791546875)
        self.assertEqual(webkit_time_object._time_zone_offset, 60)

        webkit_time_object.CopyFromDateTimeString('1601-01-02 00:00:00')
        self.assertEqual(webkit_time_object._timestamp, 86400 * 1000000)
        self.assertEqual(webkit_time_object._time_zone_offset, 0)
Example #8
  def ParseFileDownloadedRow(
      self, parser_mediator, query, row, **unused_kwargs):
    """Parses a file downloaded row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row.
    """
    query_hash = hash(query)

    event_data = ChromeHistoryFileDownloadedEventData()
    event_data.full_path = self._GetRowValue(query_hash, row, 'target_path')
    event_data.offset = self._GetRowValue(query_hash, row, 'id')
    event_data.query = query
    event_data.received_bytes = self._GetRowValue(
        query_hash, row, 'received_bytes')
    event_data.total_bytes = self._GetRowValue(query_hash, row, 'total_bytes')
    event_data.url = self._GetRowValue(query_hash, row, 'url')

    timestamp = self._GetRowValue(query_hash, row, 'start_time')
    date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
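
The pattern above defers timestamp conversion: start_time comes straight out of the Chrome History downloads table as a WebKit timestamp and is wrapped in a dfdatetime object rather than being converted eagerly. A minimal sketch of that wrapping, reusing the sample value from the tests above:

# Sketch: wrapping a raw start_time value the way the parser above does.
from dfdatetime import webkit_time as dfdatetime_webkit_time

start_time = 12926120791546875  # sample value from the tests above
date_time = dfdatetime_webkit_time.WebKitTime(timestamp=start_time)
print(date_time.CopyToDateTimeString())  # 2010-08-12 21:06:31.546875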
Example #9
    def ParseActivityLogUncompressedRow(
            self, parser_mediator, row, query=None, **unused_kwargs):
        """Parses an activity log row.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          row (sqlite3.Row): row.
          query (Optional[str]): query.
        """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        event_data = ChromeExtensionActivityEventData()
        event_data.action_type = row['action_type']
        event_data.activity_id = row['activity_id']
        event_data.api_name = row['api_name']
        event_data.arg_url = row['arg_url']
        event_data.args = row['args']
        event_data.extension_id = row['extension_id']
        event_data.other = row['other']
        event_data.page_title = row['page_title']
        event_data.page_url = row['page_url']
        event_data.query = query

        timestamp = row['time']
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_UNKNOWN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example #10
    def testCopyToDateTimeStringISO8601(self):
        """Tests the CopyToDateTimeStringISO8601 function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        date_time_string = webkit_time_object.CopyToDateTimeStringISO8601()
        self.assertEqual(date_time_string, '2010-08-12T21:06:31.546875Z')
Example #11
    def ParseActivityLogUncompressedRow(
            self, parser_mediator, query, row, **unused_kwargs):
        """Parses an activity log row.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          query (str): query that created the row.
          row (sqlite3.Row): row.
        """
        query_hash = hash(query)

        event_data = ChromeExtensionActivityEventData()
        event_data.action_type = self._GetRowValue(query_hash, row,
                                                   'action_type')
        event_data.activity_id = self._GetRowValue(query_hash, row,
                                                   'activity_id')
        event_data.api_name = self._GetRowValue(query_hash, row, 'api_name')
        event_data.arg_url = self._GetRowValue(query_hash, row, 'arg_url')
        event_data.args = self._GetRowValue(query_hash, row, 'args')
        event_data.extension_id = self._GetRowValue(query_hash, row,
                                                    'extension_id')
        event_data.other = self._GetRowValue(query_hash, row, 'other')
        event_data.page_title = self._GetRowValue(query_hash, row,
                                                  'page_title')
        event_data.page_url = self._GetRowValue(query_hash, row, 'page_url')
        event_data.query = query

        timestamp = self._GetRowValue(query_hash, row, 'time')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_UNKNOWN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example #12
  def ParseNewFileDownloadedRow(
      self, parser_mediator, row, query=None, **unused_kwargs):
    """Parses a file downloaded row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row.
      query (Optional[str]): query.
    """
    # Note that pysqlite does not accept a Unicode string in row['string'] and
    # will raise "IndexError: Index must be int or string".

    event_data = ChromeHistoryFileDownloadedEventData()
    event_data.full_path = row['target_path']
    event_data.offset = row['id']
    event_data.query = query
    event_data.received_bytes = row['received_bytes']
    event_data.total_bytes = row['total_bytes']
    event_data.url = row['url']

    timestamp = row['start_time']
    date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, eventdata.EventTimestamp.FILE_DOWNLOADED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example #13
    def testCopyFromDateTimeString(self):
        """Tests the CopyFromDateTimeString function."""
        webkit_time_object = webkit_time.WebKitTime()

        expected_timestamp = 12926044800000000
        webkit_time_object.CopyFromDateTimeString('2010-08-12')
        self.assertEqual(webkit_time_object.timestamp, expected_timestamp)

        expected_timestamp = 12926120791000000
        webkit_time_object.CopyFromDateTimeString('2010-08-12 21:06:31')
        self.assertEqual(webkit_time_object.timestamp, expected_timestamp)

        expected_timestamp = 12926120791546875
        webkit_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.546875')
        self.assertEqual(webkit_time_object.timestamp, expected_timestamp)

        expected_timestamp = 12926124391546875
        webkit_time_object.CopyFromDateTimeString(
            '2010-08-12 21:06:31.546875-01:00')
        self.assertEqual(webkit_time_object.timestamp, expected_timestamp)

        expected_timestamp = 12926117191546875
        webkit_time_object.CopyFromDateTimeString(
            '2010-08-12 21:06:31.546875+01:00')
        self.assertEqual(webkit_time_object.timestamp, expected_timestamp)

        expected_timestamp = 86400 * 1000000
        webkit_time_object.CopyFromDateTimeString('1601-01-02 00:00:00')
        self.assertEqual(webkit_time_object.timestamp, expected_timestamp)
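
This variant of CopyFromDateTimeString folds the UTC offset into the timestamp itself (the earlier CopyFromDateTimeString test instead leaves the timestamp unchanged and records the offset in _time_zone_offset, apparently reflecting a different dfdatetime release). The sign convention: a '-01:00' string is one hour behind UTC, so an hour is added; '+01:00' is ahead, so an hour is subtracted. A quick check of the expected values:

# Sketch: the one-hour UTC adjustments behind the expected values above.
base = 12926120791546875   # 2010-08-12 21:06:31.546875 UTC in microseconds
hour = 3600 * 1000000

assert base + hour == 12926124391546875  # '-01:00': wall clock behind UTC
assert base - hour == 12926117191546875  # '+01:00': wall clock ahead of UTC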
Example #14
  def ParseLastVisitedRow(
      self, parser_mediator, row, cache=None, database=None, query=None,
      **unused_kwargs):
    """Parses a last visited row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      row (sqlite3.Row): row.
      cache (Optional[SQLiteCache]): cache.
      database (Optional[SQLiteDatabase]): database.
      query (Optional[str]): query.
    """
    # Note that pysqlite does not accept a Unicode string in row['string'] and
    # will raise "IndexError: Index must be int or string".

    extras = []

    transition_nr = row['transition'] & self.CORE_MASK
    page_transition = self.PAGE_TRANSITION.get(transition_nr, u'')
    if page_transition:
      extras.append(u'Type: [{0:s} - {1:s}]'.format(
          page_transition, self.TRANSITION_LONGER.get(transition_nr, u'')))

    if row['hidden'] == u'1':
      extras.append(u'(url hidden)')

    # TODO: move to formatter.
    count = row['typed_count']
    if count >= 1:
      if count > 1:
        multi = u's'
      else:
        multi = u''

      extras.append(u'(type count {1:d} time{0:s})'.format(multi, count))
    else:
      extras.append(u'(URL not typed directly - no typed count)')

    visit_source = self._GetVisitSource(row['visit_id'], cache, database)

    # TODO: replace extras by conditional formatting.
    event_data = ChromeHistoryPageVisitedEventData()
    event_data.extra = u' '.join(extras)
    event_data.from_visit = self._GetUrl(row['from_visit'], cache, database)
    event_data.host = self._GetHostname(row['url'])
    event_data.offset = row['id']
    event_data.query = query
    event_data.title = row['title']
    event_data.typed_count = row['typed_count']
    event_data.url = row['url']
    event_data.visit_source = visit_source

    timestamp = row['visit_time']
    date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, eventdata.EventTimestamp.PAGE_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example #15
    def _ParseCacheEntries(self, parser_mediator, index_file,
                           data_block_files):
        """Parses Chrome Cache file entries.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          index_file (IndexFile): Chrome cache index file.
          data_block_files (dict[str: DataBlockFile]): data block files lookup
              table which contains data block files.
        """
        # Parse the cache entries in the data block files.
        for cache_address in index_file.index_table:
            cache_address_chain_length = 0
            while cache_address.value != 0x00000000:
                if cache_address_chain_length >= 64:
                    parser_mediator.ProduceExtractionError(
                        u'Maximum allowed cache address chain length reached.')
                    break

                data_file = data_block_files.get(cache_address.filename, None)
                if not data_file:
                    message = u'Cache address: 0x{0:08x} missing data file.'.format(
                        cache_address.value)
                    parser_mediator.ProduceExtractionError(message)
                    break

                try:
                    cache_entry = data_file.ReadCacheEntry(
                        cache_address.block_offset)
                except (IOError, UnicodeDecodeError) as exception:
                    parser_mediator.ProduceExtractionError(
                        u'Unable to parse cache entry with error: {0!s}'.format(
                            exception))
                    break

                try:
                    original_url = cache_entry.key.decode(u'ascii')
                except UnicodeDecodeError:
                    original_url = cache_entry.key.decode(u'ascii',
                                                          errors=u'replace')
                    parser_mediator.ProduceExtractionError((
                        u'unable to decode cache entry key at cache address: '
                        u'0x{0:08x}. Characters that cannot be decoded will be '
                        u'replaced with "?" or "\\ufffd".').format(
                            cache_address.value))

                event_data = ChromeCacheEntryEventData()
                event_data.original_url = original_url

                date_time = dfdatetime_webkit_time.WebKitTime(
                    timestamp=cache_entry.creation_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, eventdata.EventTimestamp.PAGE_VISITED)
                parser_mediator.ProduceEventWithEventData(event, event_data)

                cache_address = cache_entry.next
                cache_address_chain_length += 1
Example #16
    def testGetPlasoTimestamp(self):
        """Tests the GetPlasoTimestamp function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        expected_micro_posix_timestamp = 1281647191546875
        micro_posix_timestamp = webkit_time_object.GetPlasoTimestamp()
        self.assertEqual(micro_posix_timestamp, expected_micro_posix_timestamp)

        webkit_time_object = webkit_time.WebKitTime(
            timestamp=0x1ffffffffffffffff)

        micro_posix_timestamp = webkit_time_object.GetPlasoTimestamp()
        self.assertIsNone(micro_posix_timestamp)

        webkit_time_object = webkit_time.WebKitTime()

        micro_posix_timestamp = webkit_time_object.GetPlasoTimestamp()
        self.assertIsNone(micro_posix_timestamp)
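
A Plaso timestamp is microseconds since the POSIX epoch, so GetPlasoTimestamp amounts to subtracting the WebKit-to-POSIX epoch delta expressed in microseconds; a quick check against the expected value:

# Sketch: WebKit microseconds minus the epoch delta gives the Plaso timestamp.
WEBKIT_TO_POSIX_DELTA_US = 11644473600 * 1000000

assert 12926120791546875 - WEBKIT_TO_POSIX_DELTA_US == 1281647191546875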
Example #17
    def testCopyToStatTimeTuple(self):
        """Tests the CopyToStatTimeTuple function."""
        webkit_time_object = webkit_time.WebKitTime(
            timestamp=12926120791546875)

        expected_stat_time_tuple = (1281647191, 5468750)
        stat_time_tuple = webkit_time_object.CopyToStatTimeTuple()
        self.assertEqual(stat_time_tuple, expected_stat_time_tuple)

        webkit_time_object = webkit_time.WebKitTime(
            timestamp=0x1ffffffffffffffff)

        expected_stat_time_tuple = (None, None)
        stat_time_tuple = webkit_time_object.CopyToStatTimeTuple()
        self.assertEqual(stat_time_tuple, expected_stat_time_tuple)

        webkit_time_object = webkit_time.WebKitTime()

        expected_stat_time_tuple = (None, None)
        stat_time_tuple = webkit_time_object.CopyToStatTimeTuple()
        self.assertEqual(stat_time_tuple, expected_stat_time_tuple)
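
The second element of the stat time tuple is the sub-second remainder expressed in 100-nanosecond units, which is why 546875 microseconds shows up above as 5468750; a quick check:

# Sketch: the stat tuple remainder is in 100ns units, not microseconds.
remainder_microseconds = 12926120791546875 % 1000000  # 546875
assert remainder_microseconds * 10 == 5468750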
Example #18
    def _ParseCacheEntries(self, parser_mediator, index_table,
                           data_block_files):
        """Parses Chrome Cache file entries.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          index_table (list[CacheAddress]): the cache addresses which are stored
              in the index file.
          data_block_files (dict[str: file]): look up table for the data block
              file-like object handles.
        """
        # Parse the cache entries in the data block files.
        for cache_address in index_table:
            cache_address_chain_length = 0
            while cache_address.value != 0:
                if cache_address_chain_length >= 64:
                    parser_mediator.ProduceExtractionWarning(
                        'Maximum allowed cache address chain length reached.')
                    break

                data_block_file_object = data_block_files.get(
                    cache_address.filename, None)
                if not data_block_file_object:
                    message = 'Cache address: 0x{0:08x} missing data file.'.format(
                        cache_address.value)
                    parser_mediator.ProduceExtractionWarning(message)
                    break

                try:
                    cache_entry = self._data_block_file_parser.ParseCacheEntry(
                        data_block_file_object, cache_address.block_offset)
                except (IOError, errors.ParseError) as exception:
                    parser_mediator.ProduceExtractionWarning(
                        'Unable to parse cache entry with error: {0!s}'.format(
                            exception))
                    break

                event_data = ChromeCacheEntryEventData()
                event_data.original_url = cache_entry.original_url

                date_time = dfdatetime_webkit_time.WebKitTime(
                    timestamp=cache_entry.creation_time)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
                parser_mediator.ProduceEventWithEventData(event, event_data)

                cache_address = cache_entry.next
                cache_address_chain_length += 1
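
Both cache parsers cap the entry chain at 64 addresses before bailing out, which guards against cycles or corrupted next pointers in a damaged cache file. A generic sketch of the bounded walk (the names here are illustrative, not plaso's):

# Sketch: bounded traversal of a chain that may be corrupt or cyclic.
_MAXIMUM_CHAIN_LENGTH = 64  # same bound as the parsers above

def WalkChain(first_address, read_entry):
    """Yields cache entries, stopping at address 0 or when the bound is hit."""
    cache_address, chain_length = first_address, 0
    while cache_address != 0:
        if chain_length >= _MAXIMUM_CHAIN_LENGTH:
            raise ValueError('Maximum allowed cache address chain length reached.')
        cache_entry = read_entry(cache_address)
        yield cache_entry
        cache_address = cache_entry.next
        chain_length += 1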
Example #19
    def ParseLastVisitedRow(
            self, parser_mediator, query, row, cache=None, database=None,
            **unused_kwargs):
        """Parses a last visited row.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          query (str): query that created the row.
          row (sqlite3.Row): row.
          cache (SQLiteCache): cache which contains cached results from querying
              the visits and urls tables.
          database (Optional[SQLiteDatabase]): database.
        """
        query_hash = hash(query)

        hidden = self._GetRowValue(query_hash, row, 'hidden')
        transition = self._GetRowValue(query_hash, row, 'transition')

        visit_identifier = self._GetRowValue(query_hash, row, 'visit_id')
        from_visit = self._GetRowValue(query_hash, row, 'from_visit')

        event_data = ChromeHistoryPageVisitedEventData()
        event_data.from_visit = self._GetUrl(from_visit, cache, database)
        event_data.offset = self._GetRowValue(query_hash, row, 'id')
        event_data.query = query
        event_data.page_transition_type = (transition
                                           & self._PAGE_TRANSITION_CORE_MASK)
        event_data.title = self._GetRowValue(query_hash, row, 'title')
        event_data.typed_count = self._GetRowValue(query_hash, row,
                                                   'typed_count')
        event_data.url = self._GetRowValue(query_hash, row, 'url')
        event_data.url_hidden = hidden == '1'
        event_data.visit_source = self._GetVisitSource(visit_identifier, cache,
                                                       database)

        timestamp = self._GetRowValue(query_hash, row, 'visit_time')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
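
Chrome packs the visit's core transition type into the low bits of the transition column and qualifier flags (redirects, chain start and end, and so on) into the high bits, which is why the parser masks with _PAGE_TRANSITION_CORE_MASK before storing the type. A sketch with an assumed mask of 0xff; the actual constant is not shown in this snippet:

# Sketch: separating the core transition type from its qualifier bits.
PAGE_TRANSITION_CORE_MASK = 0xff  # assumed value, not shown in the snippet

transition = 0x30000001  # example raw value: core type 1 plus qualifier bits
assert transition & PAGE_TRANSITION_CORE_MASK == 1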
Example #20
    def _ExtractExtensionInstallEvents(self, settings_dict, parser_mediator):
        """Extracts extension installation events.

        Args:
          settings_dict (dict[str: object]): settings data from a Preferences
              file.
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
        """
        for extension_id, extension in sorted(settings_dict.items()):
            install_time = extension.get('install_time', None)
            if not install_time:
                parser_mediator.ProduceExtractionError(
                    'installation time missing for extension ID {0:s}'.format(
                        extension_id))
                continue

            try:
                install_time = int(install_time, 10)
            except ValueError:
                parser_mediator.ProduceExtractionError(
                    ('unable to convert installation time for extension ID '
                     '{0:s}').format(extension_id))
                continue

            manifest = extension.get('manifest', None)
            if not manifest:
                parser_mediator.ProduceExtractionError(
                    'manifest missing for extension ID {0:s}'.format(
                        extension_id))
                continue

            event_data = ChromeExtensionInstallationEventData()
            event_data.extension_id = extension_id
            event_data.extension_name = manifest.get('name', None)
            event_data.path = extension.get('path', None)

            date_time = dfdatetime_webkit_time.WebKitTime(
                timestamp=install_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_ADDED)
            parser_mediator.ProduceEventWithEventData(event, event_data)
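
Preferences stores install_time as a decimal string holding a WebKit timestamp, hence the int(install_time, 10) conversion above before wrapping. A sketch using the sample value from the tests earlier in this listing:

# Sketch: converting a Preferences install_time string into a date string.
from dfdatetime import webkit_time as dfdatetime_webkit_time

install_time = int('12926120791546875', 10)
date_time = dfdatetime_webkit_time.WebKitTime(timestamp=install_time)
print(date_time.CopyToDateTimeStringISO8601())  # 2010-08-12T21:06:31.546875Z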
Example #21
    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Chrome preferences file-like object.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          file_object (dfvfs.FileIO): file-like object.

        Raises:
          UnableToParseFile: when the file cannot be parsed.
        """
        # First pass check for initial character being open brace.
        if file_object.read(1) != b'{':
            raise errors.UnableToParseFile(
                ('[{0:s}] {1:s} is not a valid Preference file, '
                 'missing opening brace.').format(
                     self.NAME, parser_mediator.GetDisplayName()))

        file_object.seek(0, os.SEEK_SET)

        # Second pass to verify it's valid JSON
        try:
            json_dict = json.load(file_object)
        except ValueError as exception:
            raise errors.UnableToParseFile(
                ('[{0:s}] Unable to parse file {1:s} as JSON: {2:s}').format(
                    self.NAME, parser_mediator.GetDisplayName(), exception))
        except IOError as exception:
            raise errors.UnableToParseFile(
                ('[{0:s}] Unable to open file {1:s} for parsing as '
                 'JSON: {2:s}').format(self.NAME,
                                       parser_mediator.GetDisplayName(),
                                       exception))

        # Third pass to verify the file has the correct keys in it for Preferences
        if not set(self.REQUIRED_KEYS).issubset(set(json_dict.keys())):
            raise errors.UnableToParseFile(
                'File does not contain Preference data.')

        extensions_setting_dict = json_dict.get('extensions')
        if not extensions_setting_dict:
            raise errors.UnableToParseFile(
                '[{0:s}] {1:s} is not a valid Preference file, '
                'does not contain extensions value.'.format(
                    self.NAME, parser_mediator.GetDisplayName()))

        extensions_dict = extensions_setting_dict.get('settings')
        if not extensions_dict:
            raise errors.UnableToParseFile(
                '[{0:s}] {1:s} is not a valid Preference file, '
                'does not contain extensions settings value.'.format(
                    self.NAME, parser_mediator.GetDisplayName()))

        extensions_autoupdate_dict = extensions_setting_dict.get('autoupdate')
        if extensions_autoupdate_dict:
            autoupdate_lastcheck_timestamp = extensions_autoupdate_dict.get(
                'last_check', None)

            if autoupdate_lastcheck_timestamp:
                autoupdate_lastcheck = int(autoupdate_lastcheck_timestamp, 10)

                event_data = ChromeExtensionsAutoupdaterEventData()
                event_data.message = 'Chrome extensions autoupdater last run'

                date_time = dfdatetime_webkit_time.WebKitTime(
                    timestamp=autoupdate_lastcheck)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_ADDED)
                parser_mediator.ProduceEventWithEventData(event, event_data)

            autoupdate_nextcheck_timestamp = extensions_autoupdate_dict.get(
                'next_check', None)
            if autoupdate_nextcheck_timestamp:
                autoupdate_nextcheck = int(autoupdate_nextcheck_timestamp, 10)

                event_data = ChromeExtensionsAutoupdaterEventData()
                event_data.message = 'Chrome extensions autoupdater next run'

                date_time = dfdatetime_webkit_time.WebKitTime(
                    timestamp=autoupdate_nextcheck)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_ADDED)
                parser_mediator.ProduceEventWithEventData(event, event_data)

        browser_dict = json_dict.get('browser', None)
        if browser_dict and 'last_clear_browsing_data_time' in browser_dict:
            last_clear_history_timestamp = browser_dict.get(
                'last_clear_browsing_data_time', None)

            if last_clear_history_timestamp:
                last_clear_history = int(last_clear_history_timestamp, 10)

                event_data = ChromeExtensionsAutoupdaterEventData()
                event_data.message = 'Chrome history was cleared by user'

                date_time = dfdatetime_webkit_time.WebKitTime(
                    timestamp=last_clear_history)
                event = time_events.DateTimeValuesEvent(
                    date_time, definitions.TIME_DESCRIPTION_DELETED)
                parser_mediator.ProduceEventWithEventData(event, event_data)

        self._ExtractExtensionInstallEvents(extensions_dict, parser_mediator)

        profile_dict = json_dict.get('profile', None)
        if profile_dict:
            content_settings_dict = profile_dict.get('content_settings', None)
            if content_settings_dict:
                exceptions_dict = content_settings_dict.get('exceptions', None)
                if exceptions_dict:
                    self._ExtractContentSettingsExceptions(
                        exceptions_dict, parser_mediator)
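
The method above rejects non-Preferences input in three increasingly expensive passes: the first byte must be an opening brace, the whole file must parse as JSON, and the top-level object must contain REQUIRED_KEYS. A standalone sketch of the same gate; the key set here is an assumption for illustration, since the snippet does not show plaso's actual REQUIRED_KEYS:

# Sketch: the three-pass validation gate used by ParseFileObject above.
import io
import json

REQUIRED_KEYS = frozenset(['browser', 'extensions'])  # assumed, for illustration

def LooksLikePreferences(file_object):
    """Returns True if file_object passes the same three checks as above."""
    if file_object.read(1) != b'{':  # pass 1: cheap first-byte check
        return False
    file_object.seek(0, io.SEEK_SET)
    try:
        json_dict = json.load(file_object)  # pass 2: full JSON parse
    except (ValueError, IOError):
        return False
    return REQUIRED_KEYS.issubset(json_dict)  # pass 3: expected top-level keys

print(LooksLikePreferences(io.BytesIO(b'{"browser": {}, "extensions": {}}')))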
Example #22
    def ParseLastVisitedRow(
            self, parser_mediator, query, row, cache=None, database=None,
            **unused_kwargs):
        """Parses a last visited row.

        Args:
          parser_mediator (ParserMediator): mediates interactions between parsers
              and other components, such as storage and dfvfs.
          query (str): query that created the row.
          row (sqlite3.Row): row.
          cache (Optional[SQLiteCache]): cache.
          database (Optional[SQLiteDatabase]): database.
        """
        query_hash = hash(query)

        extras = []

        hidden = self._GetRowValue(query_hash, row, 'hidden')
        transition = self._GetRowValue(query_hash, row, 'transition')

        transition_nr = transition & self.CORE_MASK
        page_transition = self.PAGE_TRANSITION.get(transition_nr, '')
        if page_transition:
            extras.append('Type: [{0:s} - {1:s}]'.format(
                page_transition, self.TRANSITION_LONGER.get(transition_nr,
                                                            '')))

        if hidden == '1':
            extras.append('(url hidden)')

        # TODO: move to formatter.
        count = self._GetRowValue(query_hash, row, 'typed_count')
        if count >= 1:
            if count > 1:
                multi = 's'
            else:
                multi = ''

            extras.append('(type count {1:d} time{0:s})'.format(multi, count))
        else:
            extras.append('(URL not typed directly - no typed count)')

        visit_id = self._GetRowValue(query_hash, row, 'visit_id')
        from_visit = self._GetRowValue(query_hash, row, 'from_visit')
        url = self._GetRowValue(query_hash, row, 'url')

        visit_source = self._GetVisitSource(visit_id, cache, database)

        # TODO: replace extras by conditional formatting.
        event_data = ChromeHistoryPageVisitedEventData()
        event_data.extra = ' '.join(extras)
        event_data.from_visit = self._GetUrl(from_visit, cache, database)
        event_data.host = self._GetHostname(url)
        event_data.offset = self._GetRowValue(query_hash, row, 'id')
        event_data.query = query
        event_data.title = self._GetRowValue(query_hash, row, 'title')
        event_data.typed_count = self._GetRowValue(query_hash, row,
                                                   'typed_count')
        event_data.url = self._GetRowValue(query_hash, row, 'url')
        event_data.visit_source = visit_source

        timestamp = self._GetRowValue(query_hash, row, 'visit_time')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Example #23
    def ParseCookieRow(self, parser_mediator, row, query=None,
                       **unused_kwargs):
        """Parses a cookie row.

        Args:
          parser_mediator (ParserMediator): parser mediator.
          row (sqlite3.Row): row resulting from the query.
          query (Optional[str]): query string.
        """
        # Note that pysqlite does not accept a Unicode string in row['string'] and
        # will raise "IndexError: Index must be int or string".

        cookie_name = row['name']
        cookie_data = row['value']

        hostname = row['host_key']
        if hostname.startswith('.'):
            hostname = hostname[1:]

        if row['secure']:
            scheme = u'https'
        else:
            scheme = u'http'

        url = u'{0:s}://{1:s}{2:s}'.format(scheme, hostname, row['path'])

        event_data = ChromeCookieEventData()
        event_data.cookie_name = cookie_name
        event_data.data = cookie_data
        event_data.host = hostname
        event_data.httponly = True if row['httponly'] else False
        event_data.path = row['path']
        event_data.persistent = True if row['persistent'] else False
        event_data.query = query
        event_data.secure = True if row['secure'] else False
        event_data.url = url

        timestamp = row['creation_utc']
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.CREATION_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = row['last_access_utc']
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.ACCESS_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        if row['has_expires']:
            timestamp = row['expires_utc']
            date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(date_time,
                                                    u'Cookie Expires')
            parser_mediator.ProduceEventWithEventData(event, event_data)

        for plugin in self._cookie_plugins:
            if cookie_name != plugin.COOKIE_NAME:
                continue

            try:
                plugin.UpdateChainAndProcess(parser_mediator,
                                             cookie_data=cookie_data,
                                             cookie_name=cookie_name,
                                             url=url)

            except Exception as exception:  # pylint: disable=broad-except
                parser_mediator.ProduceExtractionError(
                    u'plugin: {0:s} unable to parse cookie with error: {1!s}'.
                    format(plugin.NAME, exception))
Example #24
    def ParseCookieRow(self, parser_mediator, query, row, **unused_kwargs):
        """Parses a cookie row.

        Args:
          parser_mediator (ParserMediator): parser mediator.
          query (str): query that created the row.
          row (sqlite3.Row): row resulting from the query.
        """
        query_hash = hash(query)

        cookie_name = self._GetRowValue(query_hash, row, 'name')
        cookie_data = self._GetRowValue(query_hash, row, 'value')

        hostname = self._GetRowValue(query_hash, row, 'host_key')
        if hostname.startswith('.'):
            hostname = hostname[1:]

        httponly = self._GetRowValue(query_hash, row, 'httponly')
        path = self._GetRowValue(query_hash, row, 'path')
        persistent = self._GetRowValue(query_hash, row, 'persistent')
        secure = self._GetRowValue(query_hash, row, 'secure')

        if secure:
            scheme = 'https'
        else:
            scheme = 'http'

        url = '{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)

        event_data = ChromeCookieEventData()
        event_data.cookie_name = cookie_name
        event_data.data = cookie_data
        event_data.host = hostname
        event_data.httponly = bool(httponly)
        event_data.path = path
        event_data.persistent = bool(persistent)
        event_data.query = query
        event_data.secure = bool(secure)
        event_data.url = url

        timestamp = self._GetRowValue(query_hash, row, 'creation_utc')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_CREATION)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'last_access_utc')
        date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
        parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = self._GetRowValue(query_hash, row, 'expires_utc')
        if timestamp:
            date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        for plugin in self._cookie_plugins:
            if cookie_name != plugin.COOKIE_NAME:
                continue

            try:
                plugin.UpdateChainAndProcess(parser_mediator,
                                             cookie_data=cookie_data,
                                             cookie_name=cookie_name,
                                             url=url)

            except Exception as exception:  # pylint: disable=broad-except
                parser_mediator.ProduceExtractionError(
                    'plugin: {0:s} unable to parse cookie with error: {1!s}'.
                    format(plugin.NAME, exception))
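
Chrome prefixes host_key with a dot for domain-scoped cookies, which both ParseCookieRow variants strip before rebuilding a display URL from the secure flag and path. A self-contained sketch with illustrative row values:

# Sketch: rebuilding the cookie URL, as both ParseCookieRow variants do.
host_key, secure, path = '.example.com', 1, '/'  # illustrative row values

hostname = host_key[1:] if host_key.startswith('.') else host_key
scheme = 'https' if secure else 'http'
url = '{0:s}://{1:s}{2:s}'.format(scheme, hostname, path)
assert url == 'https://example.com/'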