def test_invalid_notification(self):
    """Malformed notification plugin configs must fail validation."""
    cases = [
        # missing module name entirely
        ([{}], 'Module name is missing'),
        # module path that cannot be imported
        ([{"name": "invalid.module.name"}], 'No module named'),
        # real module, but an option it does not declare
        ([{
            "name": "exporters.export_formatter.json_export_formatter.JsonExportFormatter",
            "options": {"unsuported_option": True},
        }], 'unsupported_options'),
    ]
    for notifications, expected_message in cases:
        config = valid_config_with_updates(
            {'exporter_options': {"notifications": notifications}})
        with self.assertRaisesRegexp(ValueError, expected_message):
            ExporterConfig(config)
def test_not_implemented(self):
    """BaseBypass is abstract: both hooks must raise NotImplementedError."""
    config = ExporterConfig(valid_config_with_updates({}))
    bypass = BaseBypass(config, None)
    with self.assertRaises(NotImplementedError):
        BaseBypass.meets_conditions({})
    with self.assertRaises(NotImplementedError):
        bypass.execute()
def test_configuration_from_uri(self):
    """Rebuilding a config from a persistence URI must reproduce the
    original configuration with resume enabled and the job id attached.

    Note: the original test built a ``deepcopy`` named ``expected`` and
    mutated its ``exporter_options`` without ever using those mutations
    (``expected_exporter_options`` superseded them); the dead copy and
    mutations are removed — the reader/writer comparisons can use
    ``configuration`` directly since those sections were never touched.
    """
    self.maxDiff = None
    # given: a config persisted into a sqlite file
    dbfile = '%s/dbfile.db' % self.tmp_folder
    configuration = valid_config_with_updates({
        'persistence': {
            'name': 'exporters.persistence.alchemy_persistence.SqlitePersistence',
            'options': {
                'database': dbfile,
            }
        },
        'exporter_options': {'prevent_bypass': True, 'resume': False},
    })
    config = ExporterConfig(configuration)
    persistence = SqlitePersistence(config.persistence_options, meta())
    job_id = persistence.persistence_state_id
    # when: the configuration is recovered from the persistence URI
    persistence_uri = 'sqlite://%s:%s' % (dbfile, job_id)
    recovered_config = SqlitePersistence.configuration_from_uri(persistence_uri)
    # then: exporter_options gain resume=True and the original job id;
    # reader/writer sections come back untouched
    expected_exporter_options = dict(configuration['exporter_options'],
                                     resume=True,
                                     persistence_state_id=job_id)
    self.assertEqual(expected_exporter_options,
                     recovered_config['exporter_options'])
    self.assertEqual(configuration['reader'], recovered_config['reader'])
    self.assertEqual(configuration['writer'], recovered_config['writer'])
def test_commit(self, mock_uuid, mock_dump_pickle, mock_open):
    """commit_position() returns None and bumps the commit counter."""
    mock_dump_pickle.return_value = True
    mock_uuid.return_value = 1
    persistence_options = ExporterConfig(self.config).persistence_options
    persistence = PicklePersistence(persistence_options, meta())
    self.assertEqual(None, persistence.commit_position(10))
    self.assertEqual(persistence.get_metadata('commited_positions'), 1)
def test_valid_formatter(self):
    """A well-formed formatter section passes config validation."""
    formatter = {
        "name": "exporters.export_formatter.json_export_formatter.JsonExportFormatter"
    }
    config = valid_config_with_updates(
        {'exporter_options': {"formatter": formatter}})
    ExporterConfig(config)  # must not raise
def test_valid_notification(self):
    """A well-formed notification section passes config validation."""
    notification = {
        "name": "exporters.notifications.ses_mail_notifier.SESMailNotifier",
    }
    config = valid_config_with_updates(
        {'exporter_options': {"notifications": [notification]}})
    ExporterConfig(config)  # must not raise
def test_get_last_position(self, mock_load_pickle, mock_dump_pickle, mock_open, mock_is_file):
    """get_last_position() surfaces the position stored in the pickle."""
    mock_dump_pickle.return_value = True
    mock_is_file.return_value = True
    mock_load_pickle.return_value = {'last_position': {'last_key': 10}}
    persistence = PicklePersistence(
        ExporterConfig(self.config).persistence_options, meta())
    self.assertEqual({'last_key': 10}, persistence.get_last_position())
def test_invalid_formatter(self):
    """Malformed formatter sections must fail validation."""
    cases = [
        # no module name given at all
        ({}, 'Module name is missing'),
        # module path that cannot be imported
        ({"name": "invalid.module.name"}, 'No module named'),
    ]
    for formatter, expected_message in cases:
        config = valid_config_with_updates(
            {'exporter_options': {"formatter": formatter}})
        with self.assertRaisesRegexp(ValueError, expected_message):
            ExporterConfig(config)
def test_invalid_homogeneus_list(self):
    """List options whose elements have the wrong type are rejected."""
    filter_section = {
        'name': 'exporters.filters.key_value_filters.KeyValueFilter',
        'options': {
            "keys": ['This', 'should', 'be', 'dicts']
        },
    }
    config = valid_config_with_updates({'filter': filter_section})
    with self.assertRaisesRegexp(ValueError, 'Wrong type'):
        ExporterConfig(config)
def test_long_values(self):
    """Integer options wider than a machine word must be accepted."""
    reader = {
        "name": "exporters.readers.hubstorage_reader.HubstorageReader",
        "options": {
            "collection_name": "asd",
            "project_id": 2 ** 70,  # long in PY2, int in PY3
        },
    }
    config = valid_config_with_updates({"reader": reader})
    ExporterConfig(config)  # must not raise
def test_create_persistence_job(self, mock_uuid, mock_pickle):
    """Creating a pickle persistence job yields a usable instance."""
    file_name = '1'
    mock_pickle.dump.return_value = True
    mock_uuid.return_value = file_name
    persistence_options = ExporterConfig(self.config).persistence_options
    try:
        job = PicklePersistence(persistence_options, meta())
        self.assertIsInstance(job, PicklePersistence)
        job.close()
    finally:
        # the mocked uuid is used as the on-disk file name; clean it up
        remove_if_exists('/tmp/' + file_name)
def test_supported_and_not_supported_options(self):
    """Mixing valid and unknown writer options still fails validation."""
    writer = {
        'name': 'exporters.writers.console_writer.ConsoleWriter',
        'options': {
            'items_limit': 1234,
            'not_a_supported_option': 'foo'
        },
    }
    config = valid_config_with_updates({'writer': writer})
    with self.assertRaisesRegexp(ValueError, 'unsupported_options'):
        ExporterConfig(config)
def test_transform_valid_class(self):
    """Loading a non-transform class as a transform raises TypeError."""
    config = valid_config_with_updates({
        'exporter_options': {
            'LOG_LEVEL': 'DEBUG',
            'LOGGER_NAME': 'export-pipeline'
        },
        'transform': {
            # a filter class, deliberately wrong for the transform slot
            'name': 'exporters.filters.no_filter.NoFilter',
            'options': {}
        },
    })
    with self.assertRaises(TypeError):
        parsed = ExporterConfig(config)
        self.module_loader.load_transform(parsed.transform_options)
def test_notifier_valid_class(self):
    """Loading a non-notifier class as a notifier raises TypeError."""
    config = valid_config_with_updates({
        'exporter_options': {
            'LOG_LEVEL': 'DEBUG',
            'LOGGER_NAME': 'export-pipeline'
        },
        'notifier': {
            # a transform class, deliberately wrong for the notifier slot
            'name': 'exporters.transform.no_transform.NoTransform',
            'options': {}
        },
    })
    with self.assertRaises(TypeError):
        parsed = ExporterConfig(config)
        self.module_loader.load_notifier(parsed.notifiers)
def test_formatter_valid_class(self):
    """Loading a non-formatter class as a formatter raises TypeError."""
    config = valid_config_with_updates({
        'exporter_options': {
            'LOG_LEVEL': 'DEBUG',
            'LOGGER_NAME': 'export-pipeline',
            "EXPORTER": 'exporters.writers.console_writer.ConsoleWriter',
        },
        'formatter': {
            # a transform class, deliberately wrong for the formatter slot
            'name': 'exporters.transform.no_transform.NoTransform',
            'options': {}
        },
    })
    with self.assertRaises(TypeError):
        parsed = ExporterConfig(config)
        self.module_loader.load_formatter(parsed.reader_options)
def test_not_meet_supported_options(self):
    """S3Bypass must decline a config it cannot handle."""
    config = ExporterConfig(valid_config_with_updates({
        'writer': {
            'name': 'exporters.writers.s3_writer.S3Writer',
            'options': {
                'bucket': 'mock',
                'filebase': 'mock'
            },
        },
        'exporter_options': {'formatter': JSON_FORMATTER},
    }))
    self.assertFalse(S3Bypass.meets_conditions(config))
def test_supported_and_not_supported_options_for_subclass(self):
    """Option validation also covers locally-defined writer subclasses."""
    writer_name = __name__ + '.SampleSubclassWriter'
    config = valid_config_with_updates({
        'writer': {
            'name': writer_name,
            'options': {
                'filebase': 'blah',
                'someoption': 'blah',
                'not_supported_option': 'foo',
            },
        },
    })
    with self.assertRaisesRegexp(ValueError, 'unsupported_options'):
        ExporterConfig(config)
def test_writer_valid_class(self):
    """Loading a non-writer class as a writer raises TypeError."""
    config = valid_config_with_updates({
        'exporter_options': {
            'LOG_LEVEL': 'DEBUG',
            'LOGGER_NAME': 'export-pipeline'
        },
        'writer': {
            # a reader class, deliberately wrong for the writer slot
            'name': 'exporters.readers.random_reader.RandomReader',
            'options': {
                'number_of_items': 1000,
                'batch_size': 100
            },
        },
    })
    with self.assertRaises(TypeError):
        parsed = ExporterConfig(config)
        self.module_loader.load_writer(parsed.writer_options)
def __init__(self, configuration):
    """Wire up the export pipeline from a raw configuration dict.

    Parses the configuration, then loads every pipeline component
    (reader, filters, transform, formatter, writer, persistence,
    grouper, notifiers, stats manager) through the module loader.
    The load order matters: the formatter must exist before the
    writer, which receives it as ``export_formatter``.
    """
    self.config = ExporterConfig(configuration)
    self.threaded = self.config.exporter_options.get('threaded', False)
    self.logger = ExportManagerLogger(self.config.log_options)
    self.module_loader = ModuleLoader()
    metadata = ExportMeta(configuration)
    self.metadata = metadata
    self.reader = self.module_loader.load_reader(
        self.config.reader_options, metadata)
    # Stream readers need explicit deserializer/decompressor plugins;
    # other readers handle record decoding themselves.
    if is_stream_reader(self.reader):
        deserializer = self.module_loader.load_deserializer(
            self.config.deserializer_options, metadata)
        decompressor = self.module_loader.load_decompressor(
            self.config.decompressor_options, metadata)
        self.reader.deserializer = deserializer
        self.reader.decompressor = decompressor
    self.filter_before = self.module_loader.load_filter(
        self.config.filter_before_options, metadata)
    self.filter_after = self.module_loader.load_filter(
        self.config.filter_after_options, metadata)
    self.transform = self.module_loader.load_transform(
        self.config.transform_options, metadata)
    self.export_formatter = self.module_loader.load_formatter(
        self.config.formatter_options, metadata)
    # The writer is handed the formatter so it can serialize items.
    self.writer = self.module_loader.load_writer(
        self.config.writer_options,
        metadata,
        export_formatter=self.export_formatter)
    self.persistence = self.module_loader.load_persistence(
        self.config.persistence_options, metadata)
    self.grouper = self.module_loader.load_grouper(
        self.config.grouper_options, metadata)
    self.notifiers = NotifiersList(self.config.notifiers, metadata)
    # Retries can be globally disabled via configuration (e.g. tests).
    if self.config.disable_retries:
        disable_retries()
    self.logger.debug('{} has been initiated'.format(
        self.__class__.__name__))
    self.stats_manager = self.module_loader.load_stats_manager(
        self.config.stats_options, metadata)
    # Populated later with bypass strategies to try before a full run.
    self.bypass_cases = []
def create_stream_bypass_simple_config(**kwargs):
    """Build an S3-to-GStorage config used by stream-bypass tests.

    Extra top-level sections may be injected through ``kwargs``.
    """
    reader = {
        'name': 'exporters.readers.s3_reader.S3Reader',
        'options': {
            'bucket': 'source_bucket',
            'aws_access_key_id': 'a',
            'aws_secret_access_key': 'a',
            'prefix': 'some_prefix/'
        },
    }
    writer = {
        'name': 'exporters.writers.gstorage_writer.GStorageWriter',
        'options': {
            'bucket': 'dest_bucket',
            'project': "",
            'credentials': {},
            'filebase': 'some_prefix/',
        },
    }
    config = {'reader': reader, 'writer': writer}
    config.update(kwargs)
    return ExporterConfig(config)
def create_s3_bypass_simple_config(**kwargs):
    """Build an S3-to-S3 config used by s3-bypass tests.

    Extra top-level sections may be injected through ``kwargs``.
    """
    reader = {
        'name': 'exporters.readers.s3_reader.S3Reader',
        'options': {
            'bucket': 'source_bucket',
            'aws_access_key_id': 'a',
            'aws_secret_access_key': 'a',
            'prefix': 'some_prefix/'
        },
    }
    writer = {
        'name': 'exporters.writers.s3_writer.S3Writer',
        'options': {
            'bucket': 'dest_bucket',
            'aws_access_key_id': 'b',
            'aws_secret_access_key': 'b',
            'filebase': 'some_prefix/'
        },
    }
    config = {'reader': reader, 'writer': writer}
    config.update(kwargs)
    return ExporterConfig(config)
def test_curate_options(self):
    """Configs missing any mandatory section are rejected; a complete
    valid config parses into an ExporterConfig instance."""
    partial = {}
    # Grow the config one empty section at a time; every partial shape
    # (including the empty dict) must be rejected.
    for section in ('reader', 'filter', 'transform', 'writer'):
        with self.assertRaises(ConfigurationError):
            ExporterConfig(partial)
        partial[section] = ''
    with self.assertRaises(ConfigurationError):
        ExporterConfig(partial)
    self.assertIsInstance(ExporterConfig(VALID_EXPORTER_CONFIG), ExporterConfig)
def create_s3_azure_file_bypass_simple_config(**kwargs):
    """Wrap the S3-to-Azure-file bypass options in an ExporterConfig."""
    return ExporterConfig(create_s3_azure_file_bypass_simple_opts(**kwargs))
def test_commit_position(self):
    """BasePersistence.commit_position is abstract and must raise."""
    persistence_options = ExporterConfig(self.config).persistence_options
    with self.assertRaises(NotImplementedError):
        base = BasePersistence(persistence_options, meta())
        base.commit_position(1)
def test_generate_new_job(self):
    """BasePersistence.generate_new_job is abstract and must raise."""
    persistence_options = ExporterConfig(self.config).persistence_options
    with self.assertRaises(NotImplementedError):
        base = BasePersistence(persistence_options, meta())
        base.generate_new_job()
def test_delete_instance(self):
    """BasePersistence.close is abstract and must raise."""
    persistence_options = ExporterConfig(self.config).persistence_options
    with self.assertRaises(NotImplementedError):
        base = BasePersistence(persistence_options, meta())
        base.close()
def test_category_critical(self):
    """CategoryLogger must emit a critical-level message without error."""
    options = ExporterConfig(self.options)
    logger = CategoryLogger(options.log_options)
    # fixed typo in the emitted message: 'Critial' -> 'Critical'
    logger.critical('Critical message')
def test_category_warning(self):
    """CategoryLogger must emit a warning-level message without error."""
    config = ExporterConfig(self.options)
    logger = CategoryLogger(config.log_options)
    logger.warning('Warning message')