def _ParseShellItem(self, parser_mediator, shell_item):
  """Parses a shell item.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    shell_item (pyfwsi.item): shell item.
  """
  self._path_segments.append(self._ParseShellItemPathSegment(shell_item))

  event_data = shell_item_events.ShellItemFileEntryEventData()
  event_data.origin = self._origin
  event_data.shell_item_path = self.CopyToPath()

  # Only file entry shell items carry a name, extension blocks and
  # a modification time.
  if not isinstance(shell_item, pyfwsi.file_entry):
    return

  event_data.name = shell_item.name

  for extension_block in shell_item.extension_blocks:
    if not isinstance(extension_block, pyfwsi.file_entry_extension):
      continue

    file_reference = extension_block.file_reference
    if file_reference:
      # Represent the NTFS file reference as "MFT entry-sequence number".
      file_reference = '{0:d}-{1:d}'.format(
          file_reference & 0xffffffffffff, file_reference >> 48)

    event_data.file_reference = file_reference
    event_data.localized_name = extension_block.localized_name
    event_data.long_name = extension_block.long_name

    creation_time = extension_block.get_creation_time_as_integer()
    if creation_time != 0:
      date_time = dfdatetime_fat_date_time.FATDateTime(
          fat_date_time=creation_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    access_time = extension_block.get_access_time_as_integer()
    if access_time != 0:
      date_time = dfdatetime_fat_date_time.FATDateTime(
          fat_date_time=access_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  modification_time = shell_item.get_modification_time_as_integer()
  if modification_time != 0:
    date_time = dfdatetime_fat_date_time.FATDateTime(
        fat_date_time=modification_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseShellItem(self, parser_mediator, shell_item):
  """Parses a shell item.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    shell_item (pyfwsi.item): shell item.
  """
  self._path_segments.append(self._ParseShellItemPathSegment(shell_item))

  shell_item_path = self.CopyToPath()

  # Only file entry shell items produce events.
  if not isinstance(shell_item, pyfwsi.file_entry):
    return

  long_name = u''
  localized_name = u''
  file_reference = u''
  for extension_block in shell_item.extension_blocks:
    if not isinstance(extension_block, pyfwsi.file_entry_extension):
      continue

    long_name = extension_block.long_name
    localized_name = extension_block.localized_name
    file_reference = extension_block.file_reference
    if file_reference:
      # Represent the NTFS file reference as "MFT entry-sequence number".
      file_reference = u'{0:d}-{1:d}'.format(
          file_reference & 0xffffffffffff, file_reference >> 48)

    creation_time = extension_block.get_creation_time_as_integer()
    if creation_time != 0:
      date_time = dfdatetime_fat_date_time.FATDateTime(
          fat_date_time=creation_time)
      event = shell_item_events.ShellItemFileEntryEvent(
          date_time, eventdata.EventTimestamp.CREATION_TIME,
          shell_item.name, long_name, localized_name, file_reference,
          shell_item_path, self._origin)
      parser_mediator.ProduceEvent(event)

    access_time = extension_block.get_access_time_as_integer()
    if access_time != 0:
      date_time = dfdatetime_fat_date_time.FATDateTime(
          fat_date_time=access_time)
      event = shell_item_events.ShellItemFileEntryEvent(
          date_time, eventdata.EventTimestamp.ACCESS_TIME,
          shell_item.name, long_name, localized_name, file_reference,
          shell_item_path, self._origin)
      parser_mediator.ProduceEvent(event)

  modification_time = shell_item.get_modification_time_as_integer()
  if modification_time != 0:
    date_time = dfdatetime_fat_date_time.FATDateTime(
        fat_date_time=modification_time)
    event = shell_item_events.ShellItemFileEntryEvent(
        date_time, eventdata.EventTimestamp.MODIFICATION_TIME,
        shell_item.name, long_name, localized_name, file_reference,
        shell_item_path, self._origin)
    parser_mediator.ProduceEvent(event)
def testGetNormalizedTimestamp(self):
  """Tests the _GetNormalizedTimestamp function."""
  # A valid FAT date and time value yields the corresponding POSIX timestamp.
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(date_time_object._GetNormalizedTimestamp(), 1281647192.0)

  # Without a FAT date and time value there is no normalized timestamp.
  date_time_object = fat_date_time.FATDateTime()
  self.assertIsNone(date_time_object._GetNormalizedTimestamp())
def testCopyToDateTimeString(self):
  """Tests the CopyToDateTimeString function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(
      date_time_object.CopyToDateTimeString(), '2010-08-12 21:06:32')

  # An unset value produces no date time string.
  date_time_object = fat_date_time.FATDateTime()
  self.assertIsNone(date_time_object.CopyToDateTimeString())
def testGetTimeOfDay(self):
  """Tests the GetTimeOfDay function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(date_time_object.GetTimeOfDay(), (21, 6, 32))

  # An unset value produces a tuple of None values.
  date_time_object = fat_date_time.FATDateTime()
  self.assertEqual(date_time_object.GetTimeOfDay(), (None, None, None))
def testGetDate(self):
  """Tests the GetDate function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(date_time_object.GetDate(), (2010, 8, 12))

  # An unset value produces a tuple of None values.
  date_time_object = fat_date_time.FATDateTime()
  self.assertEqual(date_time_object.GetDate(), (None, None, None))
def testGetPlasoTimestamp(self):
  """Tests the GetPlasoTimestamp function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  # The expected value is in microseconds since the POSIX epoch.
  self.assertEqual(date_time_object.GetPlasoTimestamp(), 1281647192000000)

  # An unset value produces no timestamp.
  date_time_object = fat_date_time.FATDateTime()
  self.assertIsNone(date_time_object.GetPlasoTimestamp())
def testCopyToStatTimeTuple(self):
  """Tests the CopyToStatTimeTuple function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  # FAT date and time has no fraction of second, hence the None remainder.
  self.assertEqual(
      date_time_object.CopyToStatTimeTuple(), (1281647192, None))

  # An unset value produces a tuple of None values.
  date_time_object = fat_date_time.FATDateTime()
  self.assertEqual(date_time_object.CopyToStatTimeTuple(), (None, None))
def testGetDateWithTimeOfDay(self):
  """Tests the GetDateWithTimeOfDay function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(
      date_time_object.GetDateWithTimeOfDay(), (2010, 8, 12, 21, 6, 32))

  # An unset value produces a tuple of None values.
  date_time_object = fat_date_time.FATDateTime()
  self.assertEqual(
      date_time_object.GetDateWithTimeOfDay(),
      (None, None, None, None, None, None))
def testGetNumberOfSeconds(self):
  """Tests the _GetNumberOfSeconds function."""
  date_time_object = fat_date_time.FATDateTime()

  # A valid FAT date and time value does not raise.
  date_time_object._GetNumberOfSeconds(0xa8d03d0c)

  # Each test value patches one bit field of the valid value with an
  # out-of-range value, in order: seconds, minutes, hours, day of month
  # and month.
  invalid_fat_date_time_values = [
      (0xa8d03d0c & ~(0x1f << 16)) | ((30 & 0x1f) << 16),
      (0xa8d03d0c & ~(0x3f << 21)) | ((60 & 0x3f) << 21),
      (0xa8d03d0c & ~(0x1f << 27)) | ((24 & 0x1f) << 27),
      (0xa8d03d0c & ~0x1f) | (32 & 0x1f),
      (0xa8d03d0c & ~(0x0f << 5)) | ((13 & 0x0f) << 5)]

  for test_fat_date_time in invalid_fat_date_time_values:
    with self.assertRaises(ValueError):
      date_time_object._GetNumberOfSeconds(test_fat_date_time)
def testCopyToDateTimeStringISO8601(self):
  """Tests the CopyToDateTimeStringISO8601 function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(
      date_time_object.CopyToDateTimeStringISO8601(),
      '2010-08-12T21:06:32Z')
def testCopyFromDateTimeString(self):
  """Tests the CopyFromDateTimeString function."""
  date_time_object = fat_date_time.FATDateTime()

  def _CheckCopyFromDateTimeString(
      time_string, number_of_seconds, time_zone_offset):
    # Copies the date time string and checks the resulting internal state.
    date_time_object.CopyFromDateTimeString(time_string)
    self.assertEqual(date_time_object._number_of_seconds, number_of_seconds)
    self.assertEqual(date_time_object._time_zone_offset, time_zone_offset)

  _CheckCopyFromDateTimeString('2010-08-12', 966038400, 0)
  _CheckCopyFromDateTimeString('2010-08-12 21:06:31', 966114391, 0)
  _CheckCopyFromDateTimeString('2010-08-12 21:06:31.546875', 966114391, 0)
  _CheckCopyFromDateTimeString(
      '2010-08-12 21:06:31.546875-01:00', 966114391, -60)
  _CheckCopyFromDateTimeString(
      '2010-08-12 21:06:31.546875+01:00', 966114391, 60)
  _CheckCopyFromDateTimeString('1980-01-02 00:00:00', 86400, 0)

  # A date outside the supported range raises ValueError.
  with self.assertRaises(ValueError):
    date_time_object.CopyFromDateTimeString('2200-01-02 00:00:00')
def testGetNormalizedTimestamp(self):
  """Tests the _GetNormalizedTimestamp function."""
  date_time_object = fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c)
  self.assertEqual(
      date_time_object._GetNormalizedTimestamp(),
      decimal.Decimal('1281647192.0'))

  # A +60 minutes time zone offset shifts the normalized timestamp back
  # by 3600 seconds.
  date_time_object = fat_date_time.FATDateTime(
      fat_date_time=0xa8d03d0c, time_zone_offset=60)
  self.assertEqual(
      date_time_object._GetNormalizedTimestamp(),
      decimal.Decimal('1281643592.0'))

  # Without a FAT date and time value there is no normalized timestamp.
  date_time_object = fat_date_time.FATDateTime()
  self.assertIsNone(date_time_object._GetNormalizedTimestamp())
def testCopyFromDateTimeString(self):
  """Tests the CopyFromDateTimeString function."""
  date_time_object = fat_date_time.FATDateTime()

  # Pairs of date time string and the expected number of seconds after
  # copying the string into the object.
  test_values = [
      ('2010-08-12', 966038400),
      ('2010-08-12 21:06:31', 966114391),
      ('2010-08-12 21:06:31.546875', 966114391),
      ('2010-08-12 21:06:31.546875-01:00', 966117991),
      ('2010-08-12 21:06:31.546875+01:00', 966110791),
      ('1980-01-02 00:00:00', 86400)]

  for time_string, expected_number_of_seconds in test_values:
    date_time_object.CopyFromDateTimeString(time_string)
    self.assertEqual(
        date_time_object._number_of_seconds, expected_number_of_seconds)

  # A date outside the supported range raises ValueError.
  with self.assertRaises(ValueError):
    date_time_object.CopyFromDateTimeString('2200-01-02 00:00:00')
def _ParseUrl(
    self, parser_mediator, format_version, cache_directories, msiecf_item,
    recovered=False):
  """Extract data from a MSIE Cache Files (MSIECF) URL item.

  Every item is stored as an event object, one for each timestamp.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    format_version (str): MSIECF format version.
    cache_directories (list[str]): cache directory names.
    msiecf_item (pymsiecf.url): MSIECF URL item.
    recovered (Optional[bool]): True if the item was recovered.
  """
  # The secondary time can be stored in either UTC or local time
  # this is dependent on what the index.dat file is used for.
  # Either the file path or location string can be used to distinguish
  # between the different type of files.
  timestamp = msiecf_item.get_primary_time_as_integer()
  if not timestamp:
    # A primary time of 0 means the timestamp is not set.
    primary_date_time = dfdatetime_semantic_time.NotSet()
  else:
    primary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  primary_date_time_description = 'Primary Time'

  timestamp = msiecf_item.get_secondary_time_as_integer()
  secondary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  secondary_date_time_description = 'Secondary Time'

  # Refine the generic primary/secondary descriptions based on the item type.
  if msiecf_item.type:
    if msiecf_item.type == 'cache':
      primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_MODIFICATION)

    elif msiecf_item.type == 'cookie':
      primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_MODIFICATION)

    elif msiecf_item.type == 'history':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)

    elif msiecf_item.type == 'history-daily':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      # The secondary_date_time is in localtime normalize it to be in UTC.
      secondary_date_time.is_local_time = True

    elif msiecf_item.type == 'history-weekly':
      primary_date_time_description = definitions.TIME_DESCRIPTION_CREATION
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      # The secondary_date_time is in localtime normalize it to be in UTC.
      secondary_date_time.is_local_time = True

  http_headers = ''
  if msiecf_item.type and msiecf_item.data:
    if msiecf_item.type == 'cache':
      if msiecf_item.data[:4] == b'HTTP':
        # Make sure the HTTP headers are ASCII encoded.
        # TODO: determine correct encoding currently indications that
        # this could be the system narrow string codepage.
        try:
          http_headers = msiecf_item.data[:-1].decode('ascii')
        except UnicodeDecodeError:
          parser_mediator.ProduceExtractionWarning((
              'unable to decode HTTP headers of URL record at offset: '
              '0x{0:08x}. Characters that cannot be decoded will be '
              'replaced with "?" or "\\ufffd".').format(
                  msiecf_item.offset))
          # Fall back to a lossy decode so the item is not dropped entirely.
          http_headers = msiecf_item.data[:-1].decode(
              'ascii', errors='replace')

    # TODO: parse data of other URL item type like history which requires
    # OLE VT parsing.

  event_data = MSIECFURLEventData()
  event_data.cached_filename = msiecf_item.filename
  event_data.cached_file_size = msiecf_item.cached_file_size
  event_data.cache_directory_index = msiecf_item.cache_directory_index
  event_data.http_headers = http_headers
  event_data.number_of_hits = msiecf_item.number_of_hits
  event_data.offset = msiecf_item.offset
  event_data.recovered = recovered
  event_data.url = msiecf_item.location

  # Only resolve the cache directory name when the index is within bounds.
  if (event_data.cache_directory_index >= 0 and
      event_data.cache_directory_index < len(cache_directories)):
    event_data.cache_directory_name = (
        cache_directories[event_data.cache_directory_index])

  event = time_events.DateTimeValuesEvent(
      primary_date_time, primary_date_time_description)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  if secondary_date_time.timestamp != 0:
    # The time zone is passed along so local-time secondary values can be
    # normalized to UTC.
    event = time_events.DateTimeValuesEvent(
        secondary_date_time, secondary_date_time_description,
        time_zone=parser_mediator.timezone)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  expiration_timestamp = msiecf_item.get_expiration_time_as_integer()
  if expiration_timestamp != 0:
    # The expiration time in MSIECF version 4.7 is stored as a FILETIME value
    # in version 5.2 it is stored as a FAT date time value.
    # Since the as_integer function returns the raw integer value we need to
    # apply the right conversion here.
    if format_version == '4.7':
      # The maximum 64-bit FILETIME value is used to represent "never".
      if expiration_timestamp == 0x7fffffffffffffff:
        expiration_date_time = dfdatetime_semantic_time.Never()
      else:
        expiration_date_time = dfdatetime_filetime.Filetime(
            timestamp=expiration_timestamp)
    else:
      # The maximum 32-bit FAT date time value is used to represent "never".
      if expiration_timestamp == 0xffffffff:
        expiration_date_time = dfdatetime_semantic_time.Never()
      else:
        expiration_date_time = dfdatetime_fat_date_time.FATDateTime(
            fat_date_time=expiration_timestamp)

    event = time_events.DateTimeValuesEvent(
        expiration_date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  last_checked_timestamp = msiecf_item.get_last_checked_time_as_integer()
  if last_checked_timestamp != 0:
    last_checked_date_time = dfdatetime_fat_date_time.FATDateTime(
        fat_date_time=last_checked_timestamp)

    event = time_events.DateTimeValuesEvent(
        last_checked_date_time, definitions.TIME_DESCRIPTION_LAST_CHECKED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def testConvertDateTimeValuesToJSON(self):
  """Test ConvertDateTimeValuesToJSON function."""
  def _CheckConversion(date_time_object, expected_json_dict):
    # Serializes the date time values object and compares the result
    # against the expected JSON dictionary.
    json_dict = serializer.Serializer.ConvertDateTimeValuesToJSON(
        date_time_object)
    self.assertEqual(json_dict, expected_json_dict)

  posix_time_object = posix_time.PosixTime(timestamp=1281643591)
  _CheckConversion(posix_time_object, {
      '__class_name__': 'PosixTime',
      '__type__': 'DateTimeValues',
      'timestamp': 1281643591})

  # Marking the object as local time adds is_local_time to the JSON.
  posix_time_object.is_local_time = True
  _CheckConversion(posix_time_object, {
      '__class_name__': 'PosixTime',
      '__type__': 'DateTimeValues',
      'is_local_time': True,
      'timestamp': 1281643591})

  _CheckConversion(semantic_time.Never(), {
      '__class_name__': 'Never',
      '__type__': 'DateTimeValues',
      'string': 'Never'})

  _CheckConversion(
      fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c), {
          '__class_name__': 'FATDateTime',
          '__type__': 'DateTimeValues',
          'fat_date_time': 2832219404})

  golang_timestamp = bytes.fromhex('01000000000000000200000003ffff')
  _CheckConversion(
      golang_time.GolangTime(golang_timestamp=golang_timestamp), {
          '__class_name__': 'GolangTime',
          '__type__': 'DateTimeValues',
          'golang_timestamp': (
              b'\x01\x00\x00\x00\x00\x00\x00\x00'
              b'\x02\x00\x00\x00\x03\xff\xff'),
          'time_zone_offset': 0})

  _CheckConversion(
      rfc2579_date_time.RFC2579DateTime(
          rfc2579_date_time_tuple=(2010, 8, 12, 20, 6, 31, 6, '+', 2, 0)), {
          '__class_name__': 'RFC2579DateTime',
          '__type__': 'DateTimeValues',
          'rfc2579_date_time_tuple': (2010, 8, 12, 20, 6, 31, 6),
          'time_zone_offset': 120})

  _CheckConversion(
      time_elements.TimeElements(
          time_elements_tuple=(2010, 8, 12, 20, 6, 31)), {
          '__class_name__': 'TimeElements',
          '__type__': 'DateTimeValues',
          'time_elements_tuple': (2010, 8, 12, 20, 6, 31)})

  _CheckConversion(
      time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=(2010, 8, 12, 20, 6, 31, 546)), {
          '__class_name__': 'TimeElementsInMilliseconds',
          '__type__': 'DateTimeValues',
          'time_elements_tuple': (2010, 8, 12, 20, 6, 31, 546)})

  _CheckConversion(
      time_elements.TimeElementsInMicroseconds(
          time_elements_tuple=(2010, 8, 12, 20, 6, 31, 429876)), {
          '__class_name__': 'TimeElementsInMicroseconds',
          '__type__': 'DateTimeValues',
          'time_elements_tuple': (2010, 8, 12, 20, 6, 31, 429876)})