def testComparison(self):
  """Tests the comparison functions."""
  # Two Never values are considered equal to each other.
  never1 = semantic_time.Never()
  never2 = semantic_time.Never()

  self.assertTrue(never1 == never2)
  self.assertTrue(never1 >= never2)
  self.assertFalse(never1 > never2)
  self.assertTrue(never1 <= never2)
  self.assertFalse(never1 < never2)
  self.assertFalse(never1 != never2)

  # A Never value sorts after a semantic time value with a lower sort order.
  other_semantic_time = semantic_time.SemanticTime()
  other_semantic_time._SORT_ORDER = 1

  self.assertFalse(never1 == other_semantic_time)
  self.assertTrue(never1 >= other_semantic_time)
  self.assertTrue(never1 > other_semantic_time)
  self.assertFalse(never1 <= other_semantic_time)
  self.assertFalse(never1 < other_semantic_time)
  self.assertTrue(never1 != other_semantic_time)

  # A Never value also sorts after a regular date time value.
  test_date_time_values = interface.TestDateTimeValues()

  self.assertFalse(never1 == test_date_time_values)
  self.assertTrue(never1 >= test_date_time_values)
  self.assertTrue(never1 > test_date_time_values)
  self.assertFalse(never1 <= test_date_time_values)
  self.assertFalse(never1 < test_date_time_values)
  self.assertTrue(never1 != test_date_time_values)

  # Comparing against an unsupported type raises ValueError.
  with self.assertRaises(ValueError):
    never1 == 0.0  # pylint: disable=pointless-statement

  with self.assertRaises(ValueError):
    never1 >= 0.0  # pylint: disable=pointless-statement

  with self.assertRaises(ValueError):
    never1 > 0.0  # pylint: disable=pointless-statement

  with self.assertRaises(ValueError):
    never1 <= 0.0  # pylint: disable=pointless-statement

  with self.assertRaises(ValueError):
    never1 < 0.0  # pylint: disable=pointless-statement

  with self.assertRaises(ValueError):
    never1 != 0.0  # pylint: disable=pointless-statement
def _ParseUrl(
    self, parser_mediator, format_version, cache_directories, msiecf_item,
    recovered=False):
  """Extract data from a MSIE Cache Files (MSIECF) URL item.

  Every item is stored as an event object, one for each timestamp.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    format_version (str): MSIECF format version.
    cache_directories (list[str]): cache directory names.
    msiecf_item (pymsiecf.url): MSIECF URL item.
    recovered (Optional[bool]): True if the item was recovered.
  """
  # The secondary time can be stored in either UTC or local time
  # this is dependent on what the index.dat file is used for.
  # Either the file path or location string can be used to distinguish
  # between the different type of files.
  timestamp = msiecf_item.get_primary_time_as_integer()
  if not timestamp:
    primary_date_time = dfdatetime_semantic_time.NotSet()
  else:
    primary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  primary_date_time_description = 'Primary Time'

  timestamp = msiecf_item.get_secondary_time_as_integer()
  secondary_date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  secondary_date_time_description = 'Secondary Time'

  if msiecf_item.type:
    # Cache and cookie items share the same timestamp semantics, hence a
    # single combined branch instead of two duplicated ones.
    if msiecf_item.type in ('cache', 'cookie'):
      primary_date_time_description = definitions.TIME_DESCRIPTION_LAST_ACCESS
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_MODIFICATION)

    elif msiecf_item.type == 'history':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)

    elif msiecf_item.type == 'history-daily':
      primary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      # The secondary_date_time is in localtime normalize it to be in UTC.
      secondary_date_time.is_local_time = True

    elif msiecf_item.type == 'history-weekly':
      primary_date_time_description = definitions.TIME_DESCRIPTION_CREATION
      secondary_date_time_description = (
          definitions.TIME_DESCRIPTION_LAST_VISITED)
      # The secondary_date_time is in localtime normalize it to be in UTC.
      secondary_date_time.is_local_time = True

  http_headers = ''
  # Only cache items carry HTTP headers; the nested type/data checks of the
  # original collapse into one condition.
  if msiecf_item.type == 'cache' and msiecf_item.data:
    if msiecf_item.data[:4] == b'HTTP':
      # Make sure the HTTP headers are ASCII encoded.
      # TODO: determine correct encoding currently indications that
      # this could be the system narrow string codepage.
      try:
        http_headers = msiecf_item.data[:-1].decode('ascii')
      except UnicodeDecodeError:
        parser_mediator.ProduceExtractionWarning((
            'unable to decode HTTP headers of URL record at offset: '
            '0x{0:08x}. Characters that cannot be decoded will be '
            'replaced with "?" or "\\ufffd".').format(msiecf_item.offset))
        http_headers = msiecf_item.data[:-1].decode(
            'ascii', errors='replace')

  # TODO: parse data of other URL item type like history which requires
  # OLE VT parsing.

  event_data = MSIECFURLEventData()
  event_data.cached_filename = msiecf_item.filename
  event_data.cached_file_size = msiecf_item.cached_file_size
  event_data.cache_directory_index = msiecf_item.cache_directory_index
  event_data.http_headers = http_headers
  event_data.number_of_hits = msiecf_item.number_of_hits
  event_data.offset = msiecf_item.offset
  event_data.recovered = recovered
  event_data.url = msiecf_item.location

  # Only resolve the directory name when the index is within bounds.
  if 0 <= event_data.cache_directory_index < len(cache_directories):
    event_data.cache_directory_name = (
        cache_directories[event_data.cache_directory_index])

  event = time_events.DateTimeValuesEvent(
      primary_date_time, primary_date_time_description)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  if secondary_date_time.timestamp != 0:
    event = time_events.DateTimeValuesEvent(
        secondary_date_time, secondary_date_time_description,
        time_zone=parser_mediator.timezone)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  expiration_timestamp = msiecf_item.get_expiration_time_as_integer()
  if expiration_timestamp != 0:
    # The expiration time in MSIECF version 4.7 is stored as a FILETIME value
    # in version 5.2 it is stored as a FAT date time value.
    # Since the as_integer function returns the raw integer value we need to
    # apply the right conversion here.
    if format_version == '4.7':
      if expiration_timestamp == 0x7fffffffffffffff:
        expiration_date_time = dfdatetime_semantic_time.Never()
      else:
        expiration_date_time = dfdatetime_filetime.Filetime(
            timestamp=expiration_timestamp)
    else:
      if expiration_timestamp == 0xffffffff:
        expiration_date_time = dfdatetime_semantic_time.Never()
      else:
        expiration_date_time = dfdatetime_fat_date_time.FATDateTime(
            fat_date_time=expiration_timestamp)

    event = time_events.DateTimeValuesEvent(
        expiration_date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  last_checked_timestamp = msiecf_item.get_last_checked_time_as_integer()
  if last_checked_timestamp != 0:
    last_checked_date_time = dfdatetime_fat_date_time.FATDateTime(
        fat_date_time=last_checked_timestamp)

    event = time_events.DateTimeValuesEvent(
        last_checked_date_time, definitions.TIME_DESCRIPTION_LAST_CHECKED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def testInitialize(self):
  """Tests the __init__ function."""
  test_object = semantic_time.Never()
  # A freshly constructed Never value exposes the string 'Never'.
  self.assertEqual(test_object.string, 'Never')
def testConvertDateTimeValuesToJSON(self):
  """Test ConvertDateTimeValuesToJSON function."""

  def _CheckConversion(date_time_object, expected_json_dict):
    # Converts the date time object to JSON and compares the result.
    json_dict = serializer.Serializer.ConvertDateTimeValuesToJSON(
        date_time_object)
    self.assertEqual(json_dict, expected_json_dict)

  posix_time_object = posix_time.PosixTime(timestamp=1281643591)
  _CheckConversion(posix_time_object, {
      '__class_name__': 'PosixTime',
      '__type__': 'DateTimeValues',
      'timestamp': 1281643591})

  # The local time flag is serialized only when set.
  posix_time_object.is_local_time = True
  _CheckConversion(posix_time_object, {
      '__class_name__': 'PosixTime',
      '__type__': 'DateTimeValues',
      'is_local_time': True,
      'timestamp': 1281643591})

  _CheckConversion(semantic_time.Never(), {
      '__class_name__': 'Never',
      '__type__': 'DateTimeValues',
      'string': 'Never'})

  _CheckConversion(
      fat_date_time.FATDateTime(fat_date_time=0xa8d03d0c), {
          '__class_name__': 'FATDateTime',
          '__type__': 'DateTimeValues',
          'fat_date_time': 2832219404})

  golang_timestamp = bytes.fromhex('01000000000000000200000003ffff')
  _CheckConversion(
      golang_time.GolangTime(golang_timestamp=golang_timestamp), {
          '__class_name__': 'GolangTime',
          '__type__': 'DateTimeValues',
          'golang_timestamp': (
              b'\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\xff'
              b'\xff'),
          'time_zone_offset': 0})

  _CheckConversion(
      rfc2579_date_time.RFC2579DateTime(
          rfc2579_date_time_tuple=(2010, 8, 12, 20, 6, 31, 6, '+', 2, 0)), {
          '__class_name__': 'RFC2579DateTime',
          '__type__': 'DateTimeValues',
          'rfc2579_date_time_tuple': (2010, 8, 12, 20, 6, 31, 6),
          'time_zone_offset': 120})

  _CheckConversion(
      time_elements.TimeElements(
          time_elements_tuple=(2010, 8, 12, 20, 6, 31)), {
          '__class_name__': 'TimeElements',
          '__type__': 'DateTimeValues',
          'time_elements_tuple': (2010, 8, 12, 20, 6, 31)})

  _CheckConversion(
      time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=(2010, 8, 12, 20, 6, 31, 546)), {
          '__class_name__': 'TimeElementsInMilliseconds',
          '__type__': 'DateTimeValues',
          'time_elements_tuple': (2010, 8, 12, 20, 6, 31, 546)})

  _CheckConversion(
      time_elements.TimeElementsInMicroseconds(
          time_elements_tuple=(2010, 8, 12, 20, 6, 31, 429876)), {
          '__class_name__': 'TimeElementsInMicroseconds',
          '__type__': 'DateTimeValues',
          'time_elements_tuple': (2010, 8, 12, 20, 6, 31, 429876)})