def ParseZHTMLSTRINGRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a row from the database.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row resulting from query.
  """
  query_hash = hash(query)

  # The note body is stored as HTML; extract the plain text from it.
  html_string = self._GetRowValue(query_hash, row, 'zhtmlstring')
  extractor = _ZHTMLStringTextExtractor()

  event_data = MacNotesEventData()
  event_data.text = extractor.ExtractText(html_string)
  event_data.title = self._GetRowValue(query_hash, row, 'title')

  creation_timestamp = self._GetRowValue(query_hash, row, 'timestamp')
  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=creation_timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  modification_timestamp = self._GetRowValue(
      query_hash, row, 'last_modified_time')
  if modification_timestamp:
    date_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=modification_timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_USED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def testProperties(self):
  """Tests the properties."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.0)
  self.assertEqual(date_time_object.timestamp, 395011845.0)

  date_time_object = cocoa_time.CocoaTime()
  self.assertIsNone(date_time_object.timestamp)
def KnowledgeCRow(
    self, parser_mediator, query, row, **unused_kwargs):
  """Parses a KnowledgeC application activity row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  action = self._GetRowValue(query_hash, row, 'action')
  if action.startswith('/safari/'):
    event_data = MacKnowledgeCSafariEventData()
    event_data.url = self._GetRowValue(query_hash, row, 'zvaluestring')
    event_data.title = self._GetRowValue(query_hash, row, 'title')

  elif action.startswith('/app/'):
    event_data = MacKnowledgeCApplicationEventData()
    event_data.bundle_identifier = self._GetRowValue(
        query_hash, row, 'zvaluestring')

  else:
    # TODO: Add support for additional action types.
    return

  entry_creation = self._GetRowValue(query_hash, row, 'entry_creation')
  activity_starts = self._GetRowValue(query_hash, row, 'start')
  activity_ends = self._GetRowValue(query_hash, row, 'end')

  if activity_starts and activity_ends:
    event_data.duration = activity_ends - activity_starts

  entry_creation_time = dfdatetime_cocoa_time.CocoaTime(
      timestamp=entry_creation)
  entry_creation_event = time_events.DateTimeValuesEvent(
      entry_creation_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(
      entry_creation_event, event_data)

  if activity_starts:
    # Only build the start date and time value when a start timestamp is
    # present, consistent with the end timestamp handling below.
    activity_starts_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=activity_starts)
    activity_starts_event = time_events.DateTimeValuesEvent(
        activity_starts_time, definitions.TIME_DESCRIPTION_START)
    parser_mediator.ProduceEventWithEventData(
        activity_starts_event, event_data)

  if activity_ends:
    activity_ends_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=activity_ends)
    activity_ends_event = time_events.DateTimeValuesEvent(
        activity_ends_time, definitions.TIME_DESCRIPTION_END)
    parser_mediator.ProduceEventWithEventData(
        activity_ends_event, event_data)
def testGetNormalizedTimestamp(self):
  """Tests the _GetNormalizedTimestamp function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.0)
  self.assertEqual(
      date_time_object._GetNormalizedTimestamp(),
      decimal.Decimal(1373319045.0))

  date_time_object = cocoa_time.CocoaTime()
  self.assertIsNone(date_time_object._GetNormalizedTimestamp())
def testCopyToDateTimeString(self):
  """Tests the CopyToDateTimeString function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.546875)
  self.assertEqual(
      date_time_object.CopyToDateTimeString(), '2013-07-08 21:30:45.546875')

  date_time_object = cocoa_time.CocoaTime()
  self.assertIsNone(date_time_object.CopyToDateTimeString())
def testGetTimeOfDay(self):
  """Tests the GetTimeOfDay function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.546875)
  self.assertEqual(date_time_object.GetTimeOfDay(), (21, 30, 45))

  date_time_object = cocoa_time.CocoaTime()
  self.assertEqual(date_time_object.GetTimeOfDay(), (None, None, None))
def testGetDate(self):
  """Tests the GetDate function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.546875)
  self.assertEqual(date_time_object.GetDate(), (2013, 7, 8))

  date_time_object = cocoa_time.CocoaTime()
  self.assertEqual(date_time_object.GetDate(), (None, None, None))
def testGetPlasoTimestamp(self):
  """Tests the GetPlasoTimestamp function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.0)
  self.assertEqual(date_time_object.GetPlasoTimestamp(), 1373319045000000)

  date_time_object = cocoa_time.CocoaTime()
  self.assertIsNone(date_time_object.GetPlasoTimestamp())
def testGetDateWithTimeOfDay(self):
  """Tests the GetDateWithTimeOfDay function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.546875)
  self.assertEqual(
      date_time_object.GetDateWithTimeOfDay(), (2013, 7, 8, 21, 30, 45))

  date_time_object = cocoa_time.CocoaTime()
  self.assertEqual(
      date_time_object.GetDateWithTimeOfDay(),
      (None, None, None, None, None, None))
def testCopyToStatTimeTuple(self):
  """Tests the CopyToStatTimeTuple function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.0)
  self.assertEqual(
      date_time_object.CopyToStatTimeTuple(), (1373319045, 0))

  date_time_object = cocoa_time.CocoaTime()
  self.assertEqual(
      date_time_object.CopyToStatTimeTuple(), (None, None))
def ParseMessageRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a message row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = KikIOSMessageEventData()
  event_data.query = query
  event_data.offset = self._GetRowValue(query_hash, row, 'id')
  event_data.body = self._GetRowValue(query_hash, row, 'ZBODY')
  event_data.displayname = self._GetRowValue(query_hash, row, 'ZDISPLAYNAME')
  event_data.message_status = self._GetRowValue(query_hash, row, 'ZSTATE')
  event_data.message_type = self._GetRowValue(query_hash, row, 'ZTYPE')
  event_data.username = self._GetRowValue(query_hash, row, 'ZUSERNAME')

  # The received timestamp is stored as a floating point value; truncate it
  # to an integer number of seconds.
  timestamp = int(self._GetRowValue(query_hash, row, 'ZRECEIVEDTIMESTAMP'))

  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def ParseLSQuarantineRow(
    self, parser_mediator, row, query=None, **unused_kwargs):
  """Parses a launch services quarantine event row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    row (sqlite3.Row): row.
    query (Optional[str]): query.
  """
  # Note that pysqlite does not accept a Unicode string in row['string'] and
  # will raise "IndexError: Index must be int or string".
  event_data = LsQuarantineEventData()
  event_data.query = query
  event_data.agent = row['Agent']
  event_data.data = row['Data']
  event_data.url = row['URL']

  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=row['Time'])
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def testCopyFromDateTimeString(self):
  """Tests the CopyFromDateTimeString function."""
  date_time_object = cocoa_time.CocoaTime()

  date_time_object.CopyFromDateTimeString('2013-07-08')
  self.assertEqual(date_time_object._timestamp, 394934400.0)
  self.assertEqual(date_time_object._time_zone_offset, 0)

  date_time_object.CopyFromDateTimeString('2013-07-08 21:30:45')
  self.assertEqual(date_time_object._timestamp, 395011845.0)
  self.assertEqual(date_time_object._time_zone_offset, 0)

  date_time_object.CopyFromDateTimeString('2013-07-08 21:30:45.546875')
  self.assertEqual(date_time_object._timestamp, 395011845.546875)
  self.assertEqual(date_time_object._time_zone_offset, 0)

  date_time_object.CopyFromDateTimeString(
      '2013-07-08 21:30:45.546875-01:00')
  self.assertEqual(date_time_object._timestamp, 395011845.546875)
  self.assertEqual(date_time_object._time_zone_offset, -60)

  date_time_object.CopyFromDateTimeString(
      '2013-07-08 21:30:45.546875+01:00')
  self.assertEqual(date_time_object._timestamp, 395011845.546875)
  self.assertEqual(date_time_object._time_zone_offset, 60)

  date_time_object.CopyFromDateTimeString('2001-01-02 00:00:00')
  self.assertEqual(date_time_object._timestamp, 86400.0)
  self.assertEqual(date_time_object._time_zone_offset, 0)
def ParseMessageRow(
    self, parser_mediator, row, query=None, **unused_kwargs):
  """Parses a message row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    row (sqlite3.Row): row.
    query (Optional[str]): query.
  """
  # Note that pysqlite does not accept a Unicode string in row['string'] and
  # will raise "IndexError: Index must be int or string".
  event_data = IMessageEventData()
  event_data.query = query
  event_data.offset = row['ROWID']
  event_data.attachment_location = row['attachment_location']
  event_data.imessage_id = row['imessage_id']
  event_data.message_type = row['message_type']
  event_data.read_receipt = row['read_receipt']
  event_data.service = row['service']
  event_data.text = row['text']

  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=row['date'])
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def ParsePageVisitRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a visited row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = SafariHistoryPageVisitedEventData()
  event_data.query = query
  event_data.offset = self._GetRowValue(query_hash, row, 'id')
  event_data.title = self._GetRowValue(query_hash, row, 'title')
  event_data.url = self._GetRowValue(query_hash, row, 'url')
  event_data.visit_count = self._GetRowValue(query_hash, row, 'visit_count')
  event_data.was_http_non_get = bool(
      self._GetRowValue(query_hash, row, 'http_non_get'))

  timestamp = self._GetRowValue(query_hash, row, 'visit_time')
  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def testCopyFromDateTimeString(self):
  """Tests the CopyFromDateTimeString function."""
  date_time_object = cocoa_time.CocoaTime()

  date_time_object.CopyFromDateTimeString('2013-07-08')
  self.assertEqual(date_time_object.timestamp, 394934400.0)

  date_time_object.CopyFromDateTimeString('2013-07-08 21:30:45')
  self.assertEqual(date_time_object.timestamp, 395011845.0)

  date_time_object.CopyFromDateTimeString('2013-07-08 21:30:45.546875')
  self.assertEqual(date_time_object.timestamp, 395011845.546875)

  date_time_object.CopyFromDateTimeString('2013-07-08 21:30:45.546875-01:00')
  self.assertEqual(date_time_object.timestamp, 395015445.546875)

  date_time_object.CopyFromDateTimeString('2013-07-08 21:30:45.546875+01:00')
  self.assertEqual(date_time_object.timestamp, 395008245.546875)

  date_time_object.CopyFromDateTimeString('2001-01-02 00:00:00')
  self.assertEqual(date_time_object.timestamp, 86400.0)
def ParseMessageRow(
    self, parser_mediator, row, query=None, **unused_kwargs):
  """Parses a message row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    row (sqlite3.Row): row.
    query (Optional[str]): query.
  """
  # Note that pysqlite does not accept a Unicode string in row['string'] and
  # will raise "IndexError: Index must be int or string".
  event_data = KikIOSMessageEventData()
  event_data.query = query
  event_data.offset = row['id']
  event_data.body = row['ZBODY']
  event_data.displayname = row['ZDISPLAYNAME']
  event_data.message_status = row['ZSTATE']
  event_data.message_type = row['ZTYPE']
  event_data.username = row['ZUSERNAME']

  # The received timestamp is stored as a floating point value; truncate it
  # to an integer number of seconds.
  date_time = dfdatetime_cocoa_time.CocoaTime(
      timestamp=int(row['ZRECEIVEDTIMESTAMP']))
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def ParseMessageRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a message row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = IMessageEventData()
  event_data.query = query
  event_data.offset = self._GetRowValue(query_hash, row, 'ROWID')
  event_data.attachment_location = self._GetRowValue(
      query_hash, row, 'attachment_location')
  event_data.imessage_id = self._GetRowValue(query_hash, row, 'imessage_id')
  event_data.message_type = self._GetRowValue(query_hash, row, 'message_type')
  event_data.read_receipt = self._GetRowValue(query_hash, row, 'read_receipt')
  event_data.service = self._GetRowValue(query_hash, row, 'service')
  event_data.text = self._GetRowValue(query_hash, row, 'text')

  timestamp = self._GetRowValue(query_hash, row, 'date')
  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def testCopyToDateTimeString(self):
  """Tests the CopyToDateTimeString function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.546875)
  self.assertEqual(
      date_time_object.CopyToDateTimeString(), '2013-07-08 21:30:45.546875')

  # Temporarily corrupt the epoch year to exercise the error path.
  original_epoch_year = date_time_object._EPOCH.year
  date_time_object._EPOCH.year = -1

  with self.assertRaises(ValueError):
    date_time_object.CopyToDateTimeString()

  date_time_object._EPOCH.year = original_epoch_year

  date_time_object = cocoa_time.CocoaTime()
  self.assertIsNone(date_time_object.CopyToDateTimeString())
def _ParseMetadataItem(self, parser_mediator, metadata_item):
  """Parses an Apple Spotlight store metadata item.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    metadata_item (SpotlightStoreMetadataItem): a metadata item.
  """
  event_data = SpotlightStoreMetadataItemEventData()

  # TODO: for identifier 1 extract and process kMDStoreProperties plist
  # Identifier 1 is used for volume metadata.
  if metadata_item.identifier > 1:
    event_data.file_system_identifier = metadata_item.identifier
    event_data.parent_file_system_identifier = metadata_item.parent_identifier

  for metadata_attribute_name, attribute_name in (
      self._EVENT_DATA_METADATA_ATTRIBUTES):
    metadata_attribute = metadata_item.attributes.get(
        metadata_attribute_name, None)
    if metadata_attribute:
      setattr(event_data, attribute_name, metadata_attribute.value)

  for metadata_attribute_name, timestamp_description in (
      self._DATE_TIME_METADATA_ATTRIBUTES):
    metadata_attribute = metadata_item.attributes.get(
        metadata_attribute_name, None)
    if metadata_attribute and metadata_attribute.value:
      # A date and time attribute can contain either a single timestamp or
      # multiple ones. Test for list or tuple explicitly since
      # collections.Sequence was removed in Python 3.10 and would also
      # match str.
      if isinstance(metadata_attribute.value, (list, tuple)):
        timestamps = metadata_attribute.value
      else:
        timestamps = [metadata_attribute.value]

      for timestamp in timestamps:
        date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, timestamp_description)
        parser_mediator.ProduceEventWithEventData(event, event_data)

  if metadata_item.last_update_time == 0:
    date_time = dfdatetime_semantic_time.NotSet()
  else:
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=metadata_item.last_update_time)

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_UPDATE)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts Safari history items.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  format_version = match.get('WebHistoryFileVersion', None)
  if format_version != 1:
    parser_mediator.ProduceExtractionWarning(
        'unsupported Safari history version: {0!s}'.format(format_version))
    return

  if 'WebHistoryDates' not in match:
    return

  for history_entry in match.get('WebHistoryDates', {}):
    last_visited_date = history_entry.get('lastVisitedDate', None)
    if last_visited_date is None:
      parser_mediator.ProduceExtractionWarning('missing last visited date')
      continue

    try:
      # Last visited date is a string containing a floating point value.
      timestamp = float(last_visited_date)
    except (TypeError, ValueError):
      parser_mediator.ProduceExtractionWarning(
          'unable to convert last visited date {0:s}'.format(
              last_visited_date))
      continue

    event_data = SafariHistoryEventData()
    event_data.title = history_entry.get('title', None)
    event_data.url = history_entry.get('', None)
    event_data.visit_count = history_entry.get('visitCount', None)
    event_data.was_http_non_get = history_entry.get(
        'lastVisitWasHTTPNonGet', None)

    # Only store the display title when it differs from the title. Note
    # that the title must be set before this comparison; previously the
    # display title was compared against an unset (None) title and hence
    # stored even when identical to the title.
    display_title = history_entry.get('displayTitle', None)
    if display_title != event_data.title:
      event_data.display_title = display_title

    # Convert the floating point value to an integer.
    # TODO: add support for the fractional part of the floating point value.
    timestamp = int(timestamp)

    date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def ParseNotificationcenterRow(
    self, parser_mediator, query, row, **unused_kwargs):
  """Parses a message row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = MacNotificationCenterEventData()
  event_data.bundle_name = self._GetRowValue(query_hash, row, 'bundle_name')
  event_data.presented = self._GetRowValue(query_hash, row, 'presented')

  data_blob = self._GetRowValue(query_hash, row, 'dataBlob')
  try:
    property_list = plistlib.loads(data_blob)
    # The 'req' dictionary of the plist contains the additional details of
    # the notification entry.
    request_details = property_list['req']
  except (KeyError, plistlib.InvalidFileException) as exception:
    parser_mediator.ProduceExtractionWarning(
        'unable to read plist from database with error: {0!s}'.format(
            exception))
    return

  event_data.title = request_details.get('titl', None)
  event_data.subtitle = request_details.get('subt', None)
  event_data.body = request_details.get('body', None)

  timestamp = self._GetRowValue(query_hash, row, 'timestamp')
  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def ParseLSQuarantineRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a launch services quarantine event row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = LsQuarantineEventData()
  event_data.query = query
  event_data.agent = self._GetRowValue(query_hash, row, 'Agent')
  event_data.data = self._GetRowValue(query_hash, row, 'Data')
  event_data.url = self._GetRowValue(query_hash, row, 'URL')

  timestamp = self._GetRowValue(query_hash, row, 'Time')
  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseRecord(self, parser_mediator, page_data, record_offset):
  """Parses a record from the page data.

  Args:
    parser_mediator (ParserMediator): parser mediator.
    page_data (bytes): page data.
    record_offset (int): offset of the record relative to the start
        of the page.

  Raises:
    ParseError: when the record cannot be parsed.
  """
  header_map = self._GetDataTypeMap('binarycookies_record_header')

  try:
    header = self._ReadStructureFromByteStream(
        page_data[record_offset:], record_offset, header_map)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to map record header data at offset: 0x{0:08x} with error: '
        '{1!s}').format(record_offset, exception))

  event_data = SafariBinaryCookieEventData()
  event_data.flags = header.flags

  # Each value offset in the record header is relative to the start of
  # the record; a zero offset means the value is not present.
  for value_offset, attribute_name in (
      (header.url_offset, 'url'),
      (header.name_offset, 'cookie_name'),
      (header.path_offset, 'path'),
      (header.value_offset, 'cookie_value')):
    if value_offset:
      value_string = self._ParseCString(
          page_data, record_offset + value_offset)
      setattr(event_data, attribute_name, value_string)

  if header.creation_time:
    date_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=header.creation_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  if header.expiration_time:
    date_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=header.expiration_time)
  else:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  for plugin in self._cookie_plugins:
    if parser_mediator.abort:
      break

    if event_data.cookie_name != plugin.COOKIE_NAME:
      continue

    try:
      plugin.UpdateChainAndProcess(
          parser_mediator, cookie_name=event_data.cookie_name,
          cookie_data=event_data.cookie_value, url=event_data.url)
    except Exception as exception:  # pylint: disable=broad-except
      parser_mediator.ProduceExtractionError(
          'plugin: {0:s} unable to parse cookie with error: {1!s}'.format(
              plugin.NAME, exception))
def testCopyToDateTimeStringISO8601(self):
  """Tests the CopyToDateTimeStringISO8601 function."""
  date_time_object = cocoa_time.CocoaTime(timestamp=395011845.546875)
  self.assertEqual(
      date_time_object.CopyToDateTimeStringISO8601(),
      '2013-07-08T21:30:45.546875+00:00')
def _ParseCookieRecord(self, parser_mediator, page_data, page_offset):
  """Parses a cookie record.

  Args:
    parser_mediator (ParserMediator): parser mediator.
    page_data (bytes): page data.
    page_offset (int): offset of the cookie record relative to the start
        of the page.
  """
  try:
    cookie = self._COOKIE_RECORD.parse(page_data[page_offset:])
  except construct.FieldError:
    parser_mediator.ProduceExtractionError(
        'Unable to read cookie record at offset: {0:d}'.format(page_offset))
    return

  # Each value runs from its offset up to the next offset in ascending
  # order; the offsets are not guaranteed to be stored in the same order
  # as the values, so sort them to determine the value boundaries.
  offset_dict = {
      cookie.url_offset: 'url',
      cookie.name_offset: 'name',
      cookie.value_offset: 'value',
      cookie.path_offset: 'path'}

  offsets = sorted(offset_dict.keys())
  # NOTE(review): this end sentinel already includes page_offset, which is
  # added again in the slice below — confirm against the on-disk format.
  offsets.append(cookie.size + page_offset)

  # TODO: Find a better approach to parsing the data than this.
  data_dict = {}
  for index, start in enumerate(offsets[:-1]):
    end = offsets[index + 1]
    field_name = offset_dict.get(start)

    # Read the data and stop at the first NUL byte.
    raw_data = page_data[start + page_offset:end + page_offset]
    field_data, _, _ = raw_data.partition(b'\x00')
    data_dict[field_name] = field_data

  event_data = SafariBinaryCookieEventData()
  event_data.cookie_name = data_dict.get('name')
  event_data.cookie_value = data_dict.get('value')
  event_data.flags = cookie.flags
  event_data.path = data_dict.get('path')
  event_data.url = data_dict.get('url')

  if cookie.creation_date:
    date_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=cookie.creation_date)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  if cookie.expiration_date:
    date_time = dfdatetime_cocoa_time.CocoaTime(
        timestamp=cookie.expiration_date)
  else:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  for plugin in self._cookie_plugins:
    if parser_mediator.abort:
      break

    if event_data.cookie_name != plugin.COOKIE_NAME:
      continue

    try:
      plugin.UpdateChainAndProcess(
          parser_mediator, cookie_name=event_data.cookie_name,
          cookie_data=event_data.cookie_value, url=event_data.url)
    except Exception as exception:  # pylint: disable=broad-except
      parser_mediator.ProduceExtractionError(
          'plugin: {0:s} unable to parse cookie with error: {1!s}'.format(
              plugin.NAME, exception))