def testIterator(self):
  """Tests that the DSV reader iterates over every row of the test file."""
  test_file_path = self._GetTestFilePath(['password.csv'])
  self._SkipIfPathNotExists(test_file_path)

  resolver_context = context.Context()
  test_path_spec = os_path_spec.OSPathSpec(location=test_file_path)
  file_object = path_spec_resolver.Resolver.OpenFileObject(
      test_path_spec, resolver_context=resolver_context)

  line_reader = line_reader_file.BinaryLineReader(file_object)
  dsv_reader = line_reader_file.BinaryDSVReader(line_reader, delimiter=b',')

  # Materializing the iterator replaces the manual append loop.
  rows = list(dsv_reader)

  expected_rows = [
      [b'place', b'user', b'password'],
      [b'bank', b'joesmith', b'superrich'],
      [b'alarm system', b'-', b'1234'],
      [b'treasure chest', b'-', b'1111'],
      [b'uber secret laire', b'admin', b'admin']]

  self.assertEqual(len(rows), 5)
  self.assertEqual(rows, expected_rows)
def _DecodeRowValue(self, mediator, value, description):
  """Decodes a single passwd row value as UTF-8.

  Produces a preprocessing warning when the value cannot be decoded.

  Args:
    mediator (PreprocessMediator): mediates interactions between preprocess
        plugins and other components, such as storage and knowledge base.
    value (bytes): value to decode.
    description (str): human readable description of the value, used in the
        warning message.

  Returns:
    str: decoded value or None if the value could not be decoded.
  """
  try:
    return value.decode('utf-8')
  except UnicodeDecodeError:
    mediator.ProducePreprocessingWarning(
        self.ARTIFACT_DEFINITION_NAME,
        'Unable to decode {0:s}.'.format(description))
    return None

def _ParseFileData(self, mediator, file_object):
  """Parses file content (data) for user account preprocessing attributes.

  Args:
    mediator (PreprocessMediator): mediates interactions between preprocess
        plugins and other components, such as storage and knowledge base.
    file_object (dfvfs.FileIO): file-like object that contains the artifact
        value data.

  Raises:
    errors.PreProcessFail: if the preprocessing fails.
  """
  line_reader = line_reader_file.BinaryLineReader(file_object)

  try:
    reader = line_reader_file.BinaryDSVReader(line_reader, b':')
  except csv.Error as exception:
    raise errors.PreProcessFail(
        'Unable to read: {0:s} with error: {1!s}'.format(
            self.ARTIFACT_DEFINITION_NAME, exception))

  for line_number, row in enumerate(reader):
    # A passwd-style row needs at least 7 fields and non-empty username
    # (field 0) and user identifier (field 2) values. Note: the warning
    # below also covers the empty-field case, not only the field count.
    if len(row) < 7 or not row[0] or not row[2]:
      mediator.ProducePreprocessingWarning(
          self.ARTIFACT_DEFINITION_NAME,
          'Unsupported number of values in line: {0:d}.'.format(
              line_number))
      continue

    # Username and user identifier are mandatory; skip the row when either
    # cannot be decoded.
    username = self._DecodeRowValue(mediator, row[0], 'username')
    if username is None:
      continue

    identifier = self._DecodeRowValue(mediator, row[2], 'user identifier')
    if identifier is None:
      continue

    # The remaining fields are optional; a decode failure only produces a
    # warning and leaves the attribute unset.
    group_identifier = None
    if row[3]:
      group_identifier = self._DecodeRowValue(
          mediator, row[3], 'group identifier')

    full_name = None
    if row[4]:
      full_name = self._DecodeRowValue(mediator, row[4], 'full name')

    user_directory = None
    if row[5]:
      user_directory = self._DecodeRowValue(
          mediator, row[5], 'user directory')

    shell = None
    if row[6]:
      shell = self._DecodeRowValue(mediator, row[6], 'shell')

    user_account = artifacts.UserAccountArtifact(
        identifier=identifier, username=username)
    user_account.group_identifier = group_identifier
    user_account.full_name = full_name
    user_account.user_directory = user_directory
    user_account.shell = shell

    try:
      mediator.AddUserAccount(user_account)
    except KeyError as exception:
      # Duplicate user accounts are reported as warnings rather than
      # aborting the preprocessing run.
      mediator.ProducePreprocessingWarning(
          self.ARTIFACT_DEFINITION_NAME,
          'Unable to add user account with error: {0!s}'.format(
              exception))
def _DecodeRowValue(self, value, description):
  """Decodes a single passwd row value as UTF-8.

  Logs an error when the value cannot be decoded.

  Args:
    value (bytes): value to decode.
    description (str): human readable description of the value, used in the
        error message.

  Returns:
    str: decoded value or None if the value could not be decoded.
  """
  try:
    return value.decode('utf-8')
  except UnicodeDecodeError:
    # TODO: add and store preprocessing errors.
    logger.error('Unable to decode {0:s}.'.format(description))
    return None

def _ParseFileData(self, knowledge_base, file_object):
  """Parses file content (data) for user account preprocessing attributes.

  Args:
    knowledge_base (KnowledgeBase): to fill with preprocessing information.
    file_object (dfvfs.FileIO): file-like object that contains the artifact
        value data.

  Raises:
    errors.PreProcessFail: if the preprocessing fails.
  """
  line_reader = line_reader_file.BinaryLineReader(file_object)

  try:
    reader = line_reader_file.BinaryDSVReader(line_reader, b':')
  except csv.Error as exception:
    raise errors.PreProcessFail(
        'Unable to read: {0:s} with error: {1!s}'.format(
            self.ARTIFACT_DEFINITION_NAME, exception))

  for row in reader:
    # A passwd-style row needs at least 7 fields and non-empty username
    # (field 0) and user identifier (field 2) values.
    if len(row) < 7 or not row[0] or not row[2]:
      # TODO: add and store preprocessing errors.
      continue

    # Username and user identifier are mandatory; skip the row when either
    # cannot be decoded.
    username = self._DecodeRowValue(row[0], 'username')
    if username is None:
      continue

    identifier = self._DecodeRowValue(row[2], 'identifier')
    if identifier is None:
      continue

    # The remaining fields are optional; a decode failure only logs an
    # error and leaves the attribute unset.
    group_identifier = None
    if row[3]:
      group_identifier = self._DecodeRowValue(row[3], 'group identifier')

    full_name = None
    if row[4]:
      full_name = self._DecodeRowValue(row[4], 'full name')

    user_directory = None
    if row[5]:
      user_directory = self._DecodeRowValue(row[5], 'user directory')

    shell = None
    if row[6]:
      shell = self._DecodeRowValue(row[6], 'shell')

    user_account = artifacts.UserAccountArtifact(
        identifier=identifier, username=username)
    user_account.group_identifier = group_identifier
    user_account.full_name = full_name
    user_account.user_directory = user_directory
    user_account.shell = shell

    try:
      knowledge_base.AddUserAccount(user_account)
    except KeyError:
      # Deliberate best-effort: a duplicate user account is silently
      # skipped rather than aborting the preprocessing run.
      # TODO: add and store preprocessing errors.
      pass