def testProperties(self):
  """Tests the properties."""
  posix_time_object = posix_time.PosixTimeInNanoseconds(
      timestamp=1281643591987654321)

  self.assertEqual(posix_time_object.timestamp, 1281643591987654321)

  posix_time_object = posix_time.PosixTimeInNanoseconds()

  self.assertIsNone(posix_time_object.timestamp)
def testCopyToDateTimeString(self):
  """Tests the CopyToDateTimeString function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds(
      timestamp=1281643591987654321)

  date_time_string = posix_time_object.CopyToDateTimeString()
  self.assertEqual(date_time_string, '2010-08-12 20:06:31.987654321')

  posix_time_object = posix_time.PosixTimeInNanoseconds()

  date_time_string = posix_time_object.CopyToDateTimeString()
  self.assertIsNone(date_time_string)
def testGetDate(self):
  """Tests the GetDate function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds(
      timestamp=1281643591987654321)

  date_tuple = posix_time_object.GetDate()
  self.assertEqual(date_tuple, (2010, 8, 12))

  posix_time_object = posix_time.PosixTimeInNanoseconds()

  date_tuple = posix_time_object.GetDate()
  self.assertEqual(date_tuple, (None, None, None))
def testGetTimeOfDay(self):
  """Tests the GetTimeOfDay function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds(
      timestamp=1281643591987654321)

  time_of_day_tuple = posix_time_object.GetTimeOfDay()
  self.assertEqual(time_of_day_tuple, (20, 6, 31))

  posix_time_object = posix_time.PosixTimeInNanoseconds()

  time_of_day_tuple = posix_time_object.GetTimeOfDay()
  self.assertEqual(time_of_day_tuple, (None, None, None))
def testGetNormalizedTimestamp(self):
  """Tests the _GetNormalizedTimestamp function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds(
      timestamp=1281643591987654321)

  normalized_timestamp = posix_time_object._GetNormalizedTimestamp()
  self.assertEqual(
      normalized_timestamp, decimal.Decimal('1281643591.987654321'))

  posix_time_object = posix_time.PosixTimeInNanoseconds()

  normalized_timestamp = posix_time_object._GetNormalizedTimestamp()
  self.assertIsNone(normalized_timestamp)
def testCopyFromDateTimeString(self):
  """Tests the CopyFromDateTimeString function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds()

  posix_time_object.CopyFromDateTimeString('2010-08-12')
  self.assertEqual(posix_time_object.timestamp, 1281571200000000000)
  self.assertEqual(posix_time_object._time_zone_offset, 0)

  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31')
  self.assertEqual(posix_time_object.timestamp, 1281647191000000000)
  self.assertEqual(posix_time_object._time_zone_offset, 0)

  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321')
  self.assertEqual(posix_time_object.timestamp, 1281647191654321000)
  self.assertEqual(posix_time_object._time_zone_offset, 0)

  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321-01:00')
  self.assertEqual(posix_time_object.timestamp, 1281647191654321000)
  self.assertEqual(posix_time_object._time_zone_offset, -60)

  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321+01:00')
  self.assertEqual(posix_time_object.timestamp, 1281647191654321000)
  self.assertEqual(posix_time_object._time_zone_offset, 60)

  posix_time_object.CopyFromDateTimeString('1601-01-02 00:00:00')
  self.assertEqual(posix_time_object.timestamp, -11644387200000000000)
  self.assertEqual(posix_time_object._time_zone_offset, 0)
def testCopyFromDateTimeString(self):
  """Tests the CopyFromDateTimeString function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds()

  expected_timestamp = 1281571200000000000
  posix_time_object.CopyFromDateTimeString('2010-08-12')
  self.assertEqual(posix_time_object.timestamp, expected_timestamp)

  expected_timestamp = 1281647191000000000
  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31')
  self.assertEqual(posix_time_object.timestamp, expected_timestamp)

  expected_timestamp = 1281647191654321000
  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321')
  self.assertEqual(posix_time_object.timestamp, expected_timestamp)

  expected_timestamp = 1281650791654321000
  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321-01:00')
  self.assertEqual(posix_time_object.timestamp, expected_timestamp)

  expected_timestamp = 1281643591654321000
  posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321+01:00')
  self.assertEqual(posix_time_object.timestamp, expected_timestamp)

  expected_timestamp = -11644387200000000000
  posix_time_object.CopyFromDateTimeString('1601-01-02 00:00:00')
  self.assertEqual(posix_time_object.timestamp, expected_timestamp)
def testCopyToDateTimeStringISO8601(self):
  """Tests the CopyToDateTimeStringISO8601 function."""
  posix_time_object = posix_time.PosixTimeInNanoseconds(
      timestamp=1281643591987654321)

  date_time_string = posix_time_object.CopyToDateTimeStringISO8601()
  self.assertEqual(
      date_time_string, '2010-08-12T20:06:31.987654321+00:00')
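# A minimal usage sketch tying the tests above together. This is illustrative,
# not part of the original sources; the import path assumes the dfdatetime
# package and the values are taken from the expected results asserted above.
from dfdatetime import posix_time

posix_time_object = posix_time.PosixTimeInNanoseconds(
    timestamp=1281643591987654321)
print(posix_time_object.CopyToDateTimeString())
# 2010-08-12 20:06:31.987654321

posix_time_object = posix_time.PosixTimeInNanoseconds()
posix_time_object.CopyFromDateTimeString('2010-08-12 21:06:31.654321')
print(posix_time_object.timestamp)
# 1281647191654321000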
@property
def creation_time(self):
  """dfdatetime.DateTimeValues: creation time or None if not available."""
  timestamp = self._fsxfs_file_entry.get_creation_time_as_integer()
  if timestamp is None:
    return None

  return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)
@property
def modification_time(self):
  """dfdatetime.DateTimeValues: modification time or None if not available."""
  if self._stat_info is None:
    return None

  # Prefer the nanosecond-precision field when the platform provides it.
  timestamp = getattr(self._stat_info, 'st_mtime_ns', None)
  if timestamp is not None:
    return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)

  timestamp = int(self._stat_info.st_mtime)
  return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
@property
def change_time(self):
  """dfdatetime.DateTimeValues: change time or None if not available."""
  if self._stat_info is None:
    return None

  # Per the Python os.stat() documentation, on Windows the value of
  # stat_result.st_ctime contains the creation time.
  if self._OS_IS_WINDOWS:
    return None

  # Prefer the nanosecond-precision field when the platform provides it.
  timestamp = getattr(self._stat_info, 'st_ctime_ns', None)
  if timestamp is not None:
    return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)

  timestamp = int(self._stat_info.st_ctime)
  return dfdatetime_posix_time.PosixTime(timestamp=timestamp)
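# A standalone sketch of the fallback pattern used by the two properties
# above, using os.stat() directly: prefer the nanosecond-precision
# st_mtime_ns field, fall back to the second-precision float st_mtime.
# The path is hypothetical; the getattr guard matters only on Python
# builds or platforms lacking the *_ns fields.
import os

stat_info = os.stat('/etc/hosts')
timestamp = getattr(stat_info, 'st_mtime_ns', None)
if timestamp is None:
  timestamp = int(stat_info.st_mtime)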
def _GetDateTimeFromTimestamp(self, float_value):
  """Retrieves a date time object from the floating-point timestamp.

  Args:
    float_value (float): floating-point timestamp in number of seconds since
        January 1, 1970 00:00:00 UTC.

  Returns:
    dfdatetime.DateTimeValues: date and time based on the floating-point
        timestamp.
  """
  integer_value = int(float_value)
  if integer_value != float_value:
    # The timestamp has a fractional part: preserve it by scaling the value
    # to nanoseconds.
    integer_value = int(float_value * definitions.NANOSECONDS_PER_SECOND)
    date_time = dfdatetime_posix_time.PosixTimeInNanoseconds(
        timestamp=integer_value)
  else:
    date_time = dfdatetime_posix_time.PosixTime(timestamp=integer_value)

  return date_time
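# A short standalone sketch of the selection logic above, assuming the same
# dfdatetime classes. NANOSECONDS_PER_SECOND stands in for
# definitions.NANOSECONDS_PER_SECOND and the sample values are illustrative.
from dfdatetime import posix_time as dfdatetime_posix_time

NANOSECONDS_PER_SECOND = 1000000000

for float_value in (1281643591.0, 1281643591.25):
  integer_value = int(float_value)
  if integer_value != float_value:
    # Fractional part present: scale to nanoseconds to preserve it.
    date_time = dfdatetime_posix_time.PosixTimeInNanoseconds(
        timestamp=int(float_value * NANOSECONDS_PER_SECOND))
  else:
    date_time = dfdatetime_posix_time.PosixTime(timestamp=integer_value)
  print(type(date_time).__name__)
# PosixTime
# PosixTimeInNanoseconds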
def _ParseRecord(self, parser_mediator, file_object, record_offset):
  """Parses a record and produces events.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (file): file-like object.
    record_offset (int): offset of the record relative to the start of
        the file.

  Returns:
    int: next record offset.

  Raises:
    ParseError: if the record cannot be parsed.
  """
  record_map = self._GetDataTypeMap('asl_record')

  try:
    record, record_data_size = self._ReadStructureFromFileObject(
        file_object, record_offset, record_map)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError((
        'Unable to parse record at offset: 0x{0:08x} with error: '
        '{1!s}').format(record_offset, exception))

  hostname = self._ParseRecordString(
      file_object, record.hostname_string_offset)
  sender = self._ParseRecordString(file_object, record.sender_string_offset)
  facility = self._ParseRecordString(
      file_object, record.facility_string_offset)
  message = self._ParseRecordString(file_object, record.message_string_offset)

  file_offset = record_offset + record_data_size
  additional_data_size = record.data_size + 6 - record_data_size

  if additional_data_size % 8 != 0:
    raise errors.ParseError(
        'Invalid record additional data size: {0:d}.'.format(
            additional_data_size))

  additional_data = self._ReadData(
      file_object, file_offset, additional_data_size)

  extra_fields = {}
  for additional_data_offset in range(0, additional_data_size - 8, 16):
    record_extra_field = self._ParseRecordExtraField(
        additional_data[additional_data_offset:], file_offset)

    file_offset += 16

    name = self._ParseRecordString(
        file_object, record_extra_field.name_string_offset)
    value = self._ParseRecordString(
        file_object, record_extra_field.value_string_offset)

    if name is not None:
      extra_fields[name] = value

  # TODO: implement determine previous record offset

  event_data = ASLEventData()
  event_data.computer_name = hostname
  event_data.extra_information = ', '.join([
      '{0:s}: {1!s}'.format(name, value)
      for name, value in sorted(extra_fields.items())])
  event_data.facility = facility
  event_data.group_id = record.group_identifier
  event_data.level = record.alert_level
  event_data.message_id = record.message_identifier
  event_data.message = message
  event_data.pid = record.process_identifier
  event_data.read_gid = record.real_group_identifier
  event_data.read_uid = record.real_user_identifier
  event_data.record_position = record_offset
  event_data.sender = sender
  # Note that the user_sid value is expected to be a string.
  event_data.user_sid = '{0:d}'.format(record.user_identifier)

  timestamp = (
      (record.written_time * 1000000000) + record.written_time_nanoseconds)
  date_time = dfdatetime_posix_time.PosixTimeInNanoseconds(
      timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_WRITTEN)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  return record.next_record_offset
def ParseFileObject(self, parser_mediator, file_object, **kwargs):
  """Parses a locate database (updatedb) file-like object.

  Args:
    parser_mediator (ParserMediator): parser mediator.
    file_object (dfvfs.FileIO): file-like object to be parsed.

  Raises:
    UnableToParseFile: when the file cannot be parsed, this will signal
        the event extractor to apply other parsers.
  """
  locate_database_header_map = self._GetDataTypeMap('locate_database_header')

  try:
    locate_database_header, file_offset = self._ReadStructureFromFileObject(
        file_object, 0, locate_database_header_map)
  except (ValueError, errors.ParseError) as exception:
    raise errors.UnableToParseFile(
        'Unable to parse locate database header with error: {0!s}'.format(
            exception))

  # Skip the configuration block for now.
  file_offset += locate_database_header.configuration_block_size

  directory_header_map = self._GetDataTypeMap('directory_header')
  directory_entry_header_map = self._GetDataTypeMap('directory_entry_header')
  cstring_map = self._GetDataTypeMap('cstring')

  file_size = file_object.get_size()
  while file_offset + 16 < file_size:
    try:
      directory_header, data_size = self._ReadStructureFromFileObject(
          file_object, file_offset, directory_header_map)
    except (ValueError, errors.ParseError) as exception:
      parser_mediator.ProduceExtractionWarning((
          'unable to parse locate directory header at offset: 0x{0:08x} '
          'with error: {1!s}').format(file_offset, exception))
      return

    file_offset += data_size

    event_data = LocateDatabaseEvent()
    event_data.paths = [directory_header.path]

    timestamp = directory_header.nanoseconds + (
        directory_header.seconds * definitions.NANOSECONDS_PER_SECOND)
    date_time = posix_time.PosixTimeInNanoseconds(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    # TODO: determine why "condition: directory_entry.type != 2" in dtFabric
    # definitions is currently not working and clean up code once fixed.
    directory_entry_type = 0
    while directory_entry_type != 2:
      try:
        directory_entry_header, data_size = (
            self._ReadStructureFromFileObject(
                file_object, file_offset, directory_entry_header_map))
      except (ValueError, errors.ParseError) as exception:
        parser_mediator.ProduceExtractionWarning((
            'unable to parse locate directory entry header at offset: '
            '0x{0:08x} with error: {1!s}').format(file_offset, exception))
        return

      file_offset += data_size

      directory_entry_type = directory_entry_header.type
      if directory_entry_type != 2:
        try:
          directory_entry_path, data_size = self._ReadStructureFromFileObject(
              file_object, file_offset, cstring_map)
        except (ValueError, errors.ParseError) as exception:
          parser_mediator.ProduceExtractionWarning((
              'unable to parse locate directory entry path at offset: '
              '0x{0:08x} with error: {1!s}').format(file_offset, exception))
          return

        event_data.paths.append(directory_entry_path)

        file_offset += data_size
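# An arithmetic sketch of the timestamp combination above: the locate
# database stores seconds and nanoseconds separately, and both are folded
# into the single nanosecond timestamp that PosixTimeInNanoseconds expects.
# The sample values are hypothetical.
seconds = 1281643591
nanoseconds = 987654321
timestamp = nanoseconds + (seconds * 1000000000)
assert timestamp == 1281643591987654321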
@property
def access_time(self):
  """dfdatetime.DateTimeValues: access time or None if not available."""
  timestamp = self._fsxfs_file_entry.get_access_time_as_integer()
  return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)
@property
def change_time(self):
  """dfdatetime.DateTimeValues: change time or None if not available."""
  timestamp = self._fsext_file_entry.get_inode_change_time_as_integer()
  return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)
@property
def modification_time(self):
  """dfdatetime.DateTimeValues: modification time or None if not available."""
  timestamp = self._fsext_file_entry.get_modification_time_as_integer()
  return dfdatetime_posix_time.PosixTimeInNanoseconds(timestamp=timestamp)
def ParseRow(self, parser_mediator, row_offset, row):
  """Parses a line of the log file and produces events.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    row_offset (int): number of the corresponding line.
    row (dict[str, str]): fields of a single row, as specified in COLUMNS.
  """
  filename = row.get('name', None)
  md5_hash = row.get('md5', None)
  mode = row.get('mode_as_string', None)

  inode_number = row.get('inode', None)
  if '-' in inode_number:
    inode_number, _, _ = inode_number.partition('-')

  try:
    inode_number = int(inode_number, 10)
  except (TypeError, ValueError):
    inode_number = None

  data_size = self._GetIntegerValue(row, 'size')
  user_uid = self._GetIntegerValue(row, 'uid')
  user_gid = self._GetIntegerValue(row, 'gid')

  symbolic_link_target = ''
  if mode and mode[0] == 'l' and ' -> ' in filename:
    filename, _, symbolic_link_target = filename.rpartition(' -> ')

  event_data = MactimeEventData()
  event_data.filename = filename
  event_data.inode = inode_number
  event_data.md5 = md5_hash
  event_data.mode_as_string = mode
  event_data.offset = row_offset
  event_data.size = data_size
  event_data.symbolic_link_target = symbolic_link_target
  event_data.user_gid = user_gid

  if user_uid is None:
    event_data.user_sid = None
  else:
    # Note that the user_sid value is expected to be a string.
    event_data.user_sid = '{0:d}'.format(user_uid)

  for value_name, timestamp_description in self._TIMESTAMP_DESC_MAP.items():
    posix_time = self._GetFloatingPointValue(row, value_name)

    # mactime will return 0 if the timestamp is not set.
    if not posix_time:
      posix_time = self._GetIntegerValue(row, value_name)

    if not posix_time:
      continue

    if posix_time == 0:
      date_time = dfdatetime_semantic_time.NotSet()
    elif isinstance(posix_time, float):
      posix_time = int(posix_time * definitions.NANOSECONDS_PER_SECOND)
      date_time = dfdatetime_posix_time.PosixTimeInNanoseconds(
          timestamp=posix_time)
    else:
      date_time = dfdatetime_posix_time.PosixTime(timestamp=posix_time)

    event = time_events.DateTimeValuesEvent(date_time, timestamp_description)
    parser_mediator.ProduceEventWithEventData(event, event_data)
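# A cautionary sketch for the float branch above: a 64-bit float cannot carry
# full nanosecond precision for current-era timestamps, so scaling a
# floating-point mactime value to nanoseconds may be off by a few hundred
# nanoseconds. The sample value is hypothetical.
posix_time_value = 1281643591.987654
timestamp = int(posix_time_value * 1000000000)
# timestamp is approximately 1281643591987654000, subject to float rounding.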