def testParseFileObject(self):
  """Tests the ParseFileObject function."""
  resolver_context = dfvfs_context.Context()

  # Each test case is a (file data, expected number of warnings) pair; the
  # second input contains a byte (0xba) that is not valid UTF-8.
  test_cases = [
      (b'This is another file.\nWith two lines.\n', 0),
      (b'This is another file.\nWith tw\xba lines.\n', 1)]

  for file_data, expected_number_of_warnings in test_cases:
    storage_writer = self._CreateStorageWriter()
    parser_mediator = self._CreateParserMediator(storage_writer)

    test_path_spec = fake_path_spec.FakePathSpec(location='/file.txt')
    file_object = fake_file_io.FakeFile(
        resolver_context, test_path_spec, file_data)
    file_object.Open()

    test_parser = TestPyparsingSingleLineTextParser()
    test_parser.ParseFileObject(parser_mediator, file_object)

    self.assertEqual(
        storage_writer.number_of_warnings, expected_number_of_warnings)

    # The test parser does not generate events.
    self.assertEqual(storage_writer.number_of_events, 0)
def testReadLine(self):
  """Tests the _ReadLine function."""
  resolver_context = dfvfs_context.Context()
  test_path_spec = fake_path_spec.FakePathSpec(location='/file.txt')

  def _MakeTextFile(file_data, **kwargs):
    # Wraps raw file data in an opened dfVFS text file object.
    file_object = fake_file_io.FakeFile(
        resolver_context, test_path_spec, file_data)
    file_object.Open()
    return dfvfs_text_file.TextFile(file_object, **kwargs)

  # A line of valid UTF-8 decodes cleanly.
  test_text_file = _MakeTextFile(
      b'This is another file.', encoding='utf-8')
  test_parser = TestPyparsingSingleLineTextParser()
  line = test_parser._ReadLine(test_text_file)
  self.assertEqual(line, 'This is another file.')

  # An invalid byte (0xba) with strict error handling raises.
  test_text_file = _MakeTextFile(
      b'This is an\xbather file.', encoding='utf8')
  test_parser = TestPyparsingSingleLineTextParser()
  with self.assertRaises(UnicodeDecodeError):
    test_parser._ReadLine(test_text_file)

  # With 'replace' error handling the invalid byte becomes U+FFFD.
  test_text_file = _MakeTextFile(
      b'This is an\xbather file.', encoding='utf8',
      encoding_errors='replace')
  test_parser = TestPyparsingSingleLineTextParser()
  line = test_parser._ReadLine(test_text_file)
  self.assertEqual(line, 'This is an\ufffdther file.')

  # A custom error handler escapes the invalid byte and records it.
  self._encoding_errors = []
  codecs.register_error('test_handler', self._EncodingErrorHandler)

  test_text_file = _MakeTextFile(
      b'This is an\xbather file.', encoding='utf8',
      encoding_errors='test_handler')
  test_parser = TestPyparsingSingleLineTextParser()
  line = test_parser._ReadLine(test_text_file)
  self.assertEqual(line, 'This is an\\xbather file.')

  self.assertEqual(len(self._encoding_errors), 1)
  self.assertEqual(self._encoding_errors[0], (10, 0xba))
def testOpenClosePathSpec(self):
  """Test the Open and Close functions with a path specification."""
  test_path_spec = fake_path_spec.FakePathSpec(
      location='/test_data/password.txt')

  fake_file = fake_file_io.FakeFile(
      self._resolver_context, test_path_spec, self._FILE_DATA1)
  fake_file.Open()

  self.assertEqual(fake_file.get_size(), 116)

  # Test file without file data
  with self.assertRaises(TypeError):
    fake_file = fake_file_io.FakeFile(
        self._resolver_context, test_path_spec, None)
    fake_file.Open()
def GetFileObject(self, data_stream_name=''):
  """Retrieves the file-like object.

  Args:
    data_stream_name (Optional[str]): name of the data stream, where an empty
        string represents the default data stream.

  Returns:
    FakeFileIO: a file-like object or None if not available.

  Raises:
    IOError: if the file entry is not a file.
    OSError: if the file entry is not a file.
  """
  if not self.IsFile():
    raise IOError('Cannot open non-file.')

  # Only the default (nameless) data stream yields a file object.
  if data_stream_name:
    return None

  location = getattr(self.path_spec, 'location', None)
  if location is None:
    return None

  file_object = fake_file_io.FakeFile(
      self._resolver_context, self._file_system.GetDataByPath(location))
  file_object.open(path_spec=self.path_spec)
  return file_object
def testCacheFileObject(self):
  """Tests the cache file-like object functionality."""
  resolver_context = context.Context()

  # pylint: disable=protected-access
  self.assertEqual(len(resolver_context._file_object_cache._values), 0)

  test_path_spec = fake_path_spec.FakePathSpec(location='/empty.txt')
  fake_file = fake_file_io.FakeFile(resolver_context, b'')

  resolver_context.CacheFileObject(test_path_spec, fake_file)
  self.assertEqual(len(resolver_context._file_object_cache._values), 1)

  # Retrieving by path specification should yield the cached object.
  cached_file_object = resolver_context.GetFileObject(test_path_spec)
  self.assertEqual(cached_file_object, fake_file)

  # Grabbing the same path specification does not add a second cache entry.
  resolver_context.GrabFileObject(test_path_spec)
  self.assertEqual(len(resolver_context._file_object_cache._values), 1)

  resolver_context.GrabFileObject(test_path_spec)
  self.assertEqual(len(resolver_context._file_object_cache._values), 1)

  # The cache entry is only removed after the last release.
  resolver_context.ReleaseFileObject(fake_file)
  self.assertEqual(len(resolver_context._file_object_cache._values), 1)

  resolver_context.ReleaseFileObject(fake_file)
  self.assertEqual(len(resolver_context._file_object_cache._values), 0)
def GetFileObject(self, data_stream_name=u''):
  """Retrieves the file-like object.

  Args:
    data_stream_name: optional data stream name. The default is an empty
                      string which represents the default data stream.

  Returns:
    A file-like object (instance of file_io.FileIO) or None.

  Raises:
    IOError: if the file entry is not a file.
  """
  if not self.IsFile():
    raise IOError(u'Cannot open non-file.')

  # Only the default (nameless) data stream yields a file object.
  if data_stream_name:
    return

  location = getattr(self.path_spec, u'location', None)
  if location is None:
    return

  file_object = fake_file_io.FakeFile(
      self._resolver_context, self._file_system.GetDataByPath(location))
  file_object.open(path_spec=self.path_spec)
  return file_object
def testOpenClosePathSpec(self):
  """Test the open and close functionality using a path specification."""
  fake_file = fake_file_io.FakeFile(self._resolver_context, self._FILE_DATA1)
  fake_file.open(path_spec=self._path_spec1)

  self.assertEqual(fake_file.get_size(), 116)

  fake_file.close()
def testRead(self):
  """Test the read functionality."""
  fake_file = fake_file_io.FakeFile(self._resolver_context, self._FILE_DATA1)
  fake_file.open(path_spec=self._path_spec1)

  expected_data = (
      b'place,user,password\n'
      b'bank,joesmith,superrich\n'
      b'alarm system,-,1234\n'
      b'treasure chest,-,1111\n'
      b'uber secret laire,admin,admin\n')
  self.assertEqual(fake_file.read(), expected_data)

  fake_file.close()
def GetFileObject(self):
  """Retrieves the file-like object (instance of file_io.FakeFile).

  Raises:
    IOError: if the file entry is not a file.
  """
  if not self.IsFile():
    raise IOError(u'Cannot open non-file.')

  location = getattr(self.path_spec, u'location', None)
  if location is None:
    return

  file_data = self._file_system.GetDataByPath(location)

  fake_file_object = fake_file_io.FakeFile(self._resolver_context, file_data)
  fake_file_object.open(path_spec=self.path_spec)
  return fake_file_object
def _CreateFileObject(self, filename, data):
  """Creates a file-like object.

  Args:
    filename (str): name of the file.
    data (bytes): data of the file.

  Returns:
    dfvfs.FakeFile: file-like object.
  """
  fake_file = fake_file_io.FakeFile(dfvfs_context.Context(), data)

  test_path_spec = fake_path_spec.FakePathSpec(
      location='/{0:s}'.format(filename))
  fake_file.open(path_spec=test_path_spec)

  return fake_file
def testRead(self):
  """Test the read function."""
  test_path_spec = fake_path_spec.FakePathSpec(
      location='/test_data/password.txt')

  fake_file = fake_file_io.FakeFile(
      self._resolver_context, test_path_spec, self._FILE_DATA1)
  fake_file.Open()

  expected_data = (
      b'place,user,password\n'
      b'bank,joesmith,superrich\n'
      b'alarm system,-,1234\n'
      b'treasure chest,-,1111\n'
      b'uber secret laire,admin,admin\n')
  self.assertEqual(fake_file.read(), expected_data)
def testSeek(self):
  """Test the seek function."""
  test_path_spec = fake_path_spec.FakePathSpec(
      location='/test_data/another_file')

  fake_file = fake_file_io.FakeFile(self._resolver_context, self._FILE_DATA2)
  fake_file.open(path_spec=test_path_spec)

  self.assertEqual(fake_file.get_size(), 22)

  # Absolute seek.
  fake_file.seek(10)
  self.assertEqual(fake_file.read(5), b'other')
  self.assertEqual(fake_file.get_offset(), 15)

  # Seek relative to the end of the file.
  fake_file.seek(-10, os.SEEK_END)
  self.assertEqual(fake_file.read(5), b'her f')

  # Seek relative to the current offset.
  fake_file.seek(2, os.SEEK_CUR)
  self.assertEqual(fake_file.read(2), b'e.')

  # Conforming to the POSIX seek the offset can exceed the file size
  # but reading will result in no data being returned.
  fake_file.seek(300, os.SEEK_SET)
  self.assertEqual(fake_file.get_offset(), 300)
  self.assertEqual(fake_file.read(2), b'')

  with self.assertRaises(IOError):
    fake_file.seek(-10, os.SEEK_SET)

  # On error the offset should not change.
  self.assertEqual(fake_file.get_offset(), 300)

  # An unsupported whence value should raise as well.
  with self.assertRaises(IOError):
    fake_file.seek(10, 5)

  # On error the offset should not change.
  self.assertEqual(fake_file.get_offset(), 300)

  fake_file.close()
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant user timestamp entries.

  Args:
    parser_mediator: A parser mediator object (instance of ParserMediator).
    match: Optional dictionary containing keys extracted from PLIST_KEYS.
        The default is None.
  """
  # Both the account name and uid must be present to produce events.
  if u'name' not in match or u'uid' not in match:
    return

  account = match[u'name'][0]
  uid = match[u'uid'][0]
  # Cocoa epoch (2001-01-01) expressed as a POSIX timestamp in microseconds;
  # timestamps at or below this value are treated as not set.
  cocoa_zero = (
      timelib.Timestamp.COCOA_TIME_TO_POSIX_BASE *
      timelib.Timestamp.MICRO_SECONDS_PER_SECOND)

  # INFO: binplist return a string with the Plist XML.
  for policy in match.get(u'passwordpolicyoptions', []):
    try:
      xml_policy = ElementTree.fromstring(policy)
    except (ElementTree.ParseError, LookupError) as exception:
      logging.error((
          u'Unable to parse XML structure for an user policy, account: '
          u'{0:s} and uid: {1!s}, with error: {2:s}').format(
              account, uid, exception))
      continue

    for dict_elements in xml_policy.iterfind(u'dict'):
      key_values = [value.text for value in dict_elements.getchildren()]

      # Taking a list and converting it to a dict, using every other item
      # as the key and the other one as the value.
      policy_dict = dict(zip(key_values[0::2], key_values[1::2]))

      time_string = policy_dict.get(u'passwordLastSetTime', None)
      if time_string:
        try:
          timestamp = timelib.Timestamp.FromTimeString(time_string)
        except errors.TimestampError:
          parser_mediator.ProduceParseError(
              u'Unable to parse time string: {0:s}'.format(time_string))
          timestamp = 0

        shadow_hash_data = match.get(u'ShadowHashData', None)
        if timestamp > cocoa_zero and isinstance(
            shadow_hash_data, (list, tuple)):
          # Extract the hash password information.
          # It is store in the attribute ShadowHasData which is
          # a binary plist data; However binplist only extract one
          # level of binary plist, then it returns this information
          # as a string.

          # TODO: change this into a DataRange instead. For this we
          # need the file offset and size of the ShadowHashData value data.
          shadow_hash_data = shadow_hash_data[0]

          # Parse the nested binary plist through an in-memory fake file.
          resolver_context = context.Context()
          fake_file = fake_file_io.FakeFile(
              resolver_context, shadow_hash_data)
          fake_file.open(path_spec=fake_path_spec.FakePathSpec(
              location=u'ShadowHashData'))

          try:
            plist_file = binplist.BinaryPlist(file_obj=fake_file)
            top_level = plist_file.Parse()
          except binplist.FormatError:
            top_level = dict()

          salted_hash = top_level.get(u'SALTED-SHA512-PBKDF2', None)
          if salted_hash:
            password_hash = u'$ml${0:d}${1:s}${2:s}'.format(
                salted_hash[u'iterations'],
                binascii.hexlify(salted_hash[u'salt']),
                binascii.hexlify(salted_hash[u'entropy']))
          else:
            password_hash = u'N/A'

          description = (
              u'Last time {0:s} ({1!s}) changed the password: {2!s}').format(
                  account, uid, password_hash)
          event_object = plist_event.PlistTimeEvent(
              self._ROOT, u'passwordLastSetTime', timestamp, description)
          parser_mediator.ProduceEvent(event_object)

      time_string = policy_dict.get(u'lastLoginTimestamp', None)
      if time_string:
        try:
          timestamp = timelib.Timestamp.FromTimeString(time_string)
        except errors.TimestampError:
          parser_mediator.ProduceParseError(
              u'Unable to parse time string: {0:s}'.format(time_string))
          timestamp = 0

        description = u'Last login from {0:s} ({1!s})'.format(account, uid)
        if timestamp > cocoa_zero:
          event_object = plist_event.PlistTimeEvent(
              self._ROOT, u'lastLoginTimestamp', timestamp, description)
          parser_mediator.ProduceEvent(event_object)

      time_string = policy_dict.get(u'failedLoginTimestamp', None)
      if time_string:
        try:
          timestamp = timelib.Timestamp.FromTimeString(time_string)
        except errors.TimestampError:
          parser_mediator.ProduceParseError(
              u'Unable to parse time string: {0:s}'.format(time_string))
          timestamp = 0

        description = (
            u'Last failed login from {0:s} ({1!s}) ({2!s} times)').format(
                account, uid, policy_dict.get(u'failedLoginCount', 0))
        if timestamp > cocoa_zero:
          event_object = plist_event.PlistTimeEvent(
              self._ROOT, u'failedLoginTimestamp', timestamp, description)
          parser_mediator.ProduceEvent(event_object)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant user timestamp entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  # Both the account name and uid must be present to produce events.
  if 'name' not in match or 'uid' not in match:
    return

  account = match['name'][0]
  uid = match['uid'][0]

  for policy in match.get('passwordpolicyoptions', []):
    try:
      xml_policy = ElementTree.fromstring(policy)
    except (ElementTree.ParseError, LookupError) as exception:
      logger.error((
          'Unable to parse XML structure for an user policy, account: '
          '{0:s} and uid: {1!s}, with error: {2!s}').format(
              account, uid, exception))
      continue

    for dict_elements in xml_policy.iterfind('dict'):
      key_values = [value.text for value in iter(dict_elements)]

      # Taking a list and converting it to a dict, using every other item
      # as the key and the other one as the value.
      policy_dict = dict(zip(key_values[0::2], key_values[1::2]))

      # A value of 2001-01-01T00:00:00Z (the Cocoa epoch) is treated as
      # not set.
      time_string = policy_dict.get('passwordLastSetTime', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse password last set time string: {0:s}'.format(
                  time_string))

        shadow_hash_data = match.get('ShadowHashData', None)
        if date_time and isinstance(shadow_hash_data, (list, tuple)):
          # Extract the hash password information.
          # It is store in the attribute ShadowHasData which is
          # a binary plist data; However biplist only extracts one
          # level of binary plist, then it returns this information
          # as a string.

          # TODO: change this into a DataRange instead. For this we
          # need the file offset and size of the ShadowHashData value data.
          shadow_hash_data = shadow_hash_data[0]

          # Parse the nested binary plist through an in-memory fake file.
          resolver_context = context.Context()
          fake_file = fake_file_io.FakeFile(
              resolver_context, shadow_hash_data)
          shadow_hash_data_path_spec = fake_path_spec.FakePathSpec(
              location='ShadowHashData')
          fake_file.open(path_spec=shadow_hash_data_path_spec)

          try:
            plist_file = biplist.readPlist(fake_file)
          except biplist.InvalidPlistException:
            plist_file = {}

          salted_hash = plist_file.get('SALTED-SHA512-PBKDF2', None)
          if salted_hash:
            salt_hex_bytes = codecs.encode(salted_hash['salt'], 'hex')
            salt_string = codecs.decode(salt_hex_bytes, 'ascii')
            entropy_hex_bytes = codecs.encode(
                salted_hash['entropy'], 'hex')
            entropy_string = codecs.decode(entropy_hex_bytes, 'ascii')
            password_hash = '$ml${0:d}${1:s}${2:s}'.format(
                salted_hash['iterations'], salt_string, entropy_string)
          else:
            password_hash = 'N/A'

          event_data = plist_event.PlistTimeEventData()
          event_data.desc = (
              'Last time {0:s} ({1!s}) changed the password: {2!s}').format(
                  account, uid, password_hash)
          event_data.key = 'passwordLastSetTime'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      time_string = policy_dict.get('lastLoginTimestamp', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse last login time string: {0:s}'.format(
                  time_string))

        if date_time:
          event_data = plist_event.PlistTimeEventData()
          event_data.desc = 'Last login from {0:s} ({1!s})'.format(
              account, uid)
          event_data.key = 'lastLoginTimestamp'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      time_string = policy_dict.get('failedLoginTimestamp', None)
      if time_string and time_string != '2001-01-01T00:00:00Z':
        try:
          date_time = dfdatetime_time_elements.TimeElements()
          date_time.CopyFromStringISO8601(time_string)
        except ValueError:
          date_time = None
          parser_mediator.ProduceExtractionWarning(
              'unable to parse failed login time string: {0:s}'.format(
                  time_string))

        if date_time:
          event_data = plist_event.PlistTimeEventData()
          event_data.desc = (
              'Last failed login from {0:s} ({1!s}) ({2!s} times)').format(
                  account, uid, policy_dict.get('failedLoginCount', 0))
          event_data.key = 'failedLoginTimestamp'
          event_data.root = self._ROOT

          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_context, match=None, **unused_kwargs):
  """Extracts relevant user timestamp entries.

  Args:
    parser_context: A parser context object (instance of ParserContext).
    match: Optional dictionary containing keys extracted from PLIST_KEYS.
        The default is None.
  """
  account = match['name'][0]
  uid = match['uid'][0]
  # Cocoa epoch (2001-01-01) expressed as a POSIX timestamp in microseconds;
  # timestamps at or below this value are treated as not set.
  cocoa_zero = (
      timelib.Timestamp.COCOA_TIME_TO_POSIX_BASE *
      timelib.Timestamp.MICRO_SECONDS_PER_SECOND)

  # INFO: binplist return a string with the Plist XML.
  for policy in match['passwordpolicyoptions']:
    xml_policy = ElementTree.fromstring(policy)
    for dict_elements in xml_policy.iterfind('dict'):
      # Taking a list and converting it to a dict, using every other item
      # as the key and the other one as the value.
      key_values = [value.text for value in dict_elements.getchildren()]
      policy_dict = dict(zip(key_values[0::2], key_values[1::2]))

      if policy_dict.get('passwordLastSetTime', 0):
        timestamp = timelib.Timestamp.FromTimeString(
            policy_dict.get('passwordLastSetTime', '0'))
        if timestamp > cocoa_zero:
          # Extract the hash password information.
          # It is store in the attribure ShadowHasData which is
          # a binary plist data; However binplist only extract one
          # level of binary plist, then it returns this information
          # as a string.

          # TODO: change this into a DataRange instead. For this we
          # need the file offset and size of the ShadowHashData value data.

          # Parse the nested binary plist through an in-memory fake file.
          resolver_context = context.Context()
          fake_file = fake_file_io.FakeFile(
              resolver_context, match['ShadowHashData'][0])
          fake_file.open(path_spec=fake_path_spec.FakePathSpec(
              location=u'ShadowHashData'))

          try:
            plist_file = binplist.BinaryPlist(file_obj=fake_file)
            top_level = plist_file.Parse()
          except binplist.FormatError:
            top_level = dict()

          salted_hash = top_level.get('SALTED-SHA512-PBKDF2', None)
          if salted_hash:
            password_hash = u'$ml${0:d}${1:s}${2:s}'.format(
                salted_hash['iterations'],
                binascii.hexlify(salted_hash['salt']),
                binascii.hexlify(salted_hash['entropy']))
          else:
            password_hash = u'N/A'

          description = (
              u'Last time {0:s} ({1!s}) changed the password: {2!s}').format(
                  account, uid, password_hash)
          event_object = plist_event.PlistTimeEvent(
              self._ROOT, u'passwordLastSetTime', timestamp, description)
          parser_context.ProduceEvent(event_object, plugin_name=self.NAME)

      if policy_dict.get('lastLoginTimestamp', 0):
        timestamp = timelib.Timestamp.FromTimeString(
            policy_dict.get('lastLoginTimestamp', '0'))
        description = u'Last login from {0:s} ({1!s})'.format(account, uid)
        if timestamp > cocoa_zero:
          event_object = plist_event.PlistTimeEvent(
              self._ROOT, u'lastLoginTimestamp', timestamp, description)
          parser_context.ProduceEvent(event_object, plugin_name=self.NAME)

      if policy_dict.get('failedLoginTimestamp', 0):
        timestamp = timelib.Timestamp.FromTimeString(
            policy_dict.get('failedLoginTimestamp', '0'))
        # NOTE(review): raises KeyError if failedLoginCount is absent while
        # failedLoginTimestamp is present — confirm this is intended.
        description = (
            u'Last failed login from {0:s} ({1!s}) ({2!s} times)').format(
                account, uid, policy_dict['failedLoginCount'])
        if timestamp > cocoa_zero:
          event_object = plist_event.PlistTimeEvent(
              self._ROOT, u'failedLoginTimestamp', timestamp, description)
          parser_context.ProduceEvent(event_object, plugin_name=self.NAME)