def test_run_exporter_integration(self, mock_requests):
    """End-to-end run with RandomReader -> JQTransform -> CloudSearchWriter.

    Verifies that exporting 100 random items results in exactly one
    batched POST to the CloudSearch document-batch endpoint.
    """
    # given:
    endpoint_url = "http://fake-domain.us-west-2.cloudsearch.amazonaws.com"
    config = {
        "label": "unittest",
        "reader": {
            "name": "exporters.readers.random_reader.RandomReader",
            "options": {"number_of_items": 100},
        },
        "transform": {
            "name": "exporters.transform.jq_transform.JQTransform",
            "options": {
                "jq_filter": (
                    "{key: .key, country: .country_code, value: .value} |"
                    " del(.[] | select(. == null))"
                )
            },
        },
        "writer": {
            "name": "exporters.writers.cloudsearch_writer.CloudSearchWriter",
            "options": {"endpoint_url": endpoint_url, "id_field": "key"},
        },
    }
    # Construct outside the try block: if construction itself raises there is
    # no persistence to clean up, and the finally clause would otherwise fail
    # with a NameError that masks the original exception.
    exporter = BasicExporter(config)
    try:
        # when:
        exporter.export()

        # then: all 100 items fit in a single CloudSearch batch request
        self.assertEqual(1, len(mock_requests.post.mock_calls))
        url = endpoint_url + "/2013-01-01/documents/batch"
        mock_requests.post.assert_called_once_with(url, data=mock.ANY, headers=mock.ANY)
    finally:
        exporter.persistence.delete()
def test_parses_the_options_and_loads_pipeline_items(self):
    """The configured reader/writer/transform classes are instantiated.

    Uses assertIsInstance instead of assertTrue(isinstance(...)) so a
    failure reports the actual type instead of just "False is not true".
    """
    exporter = BasicExporter(self.options)
    try:
        self.assertIsInstance(exporter.reader, RandomReader)
        self.assertIsInstance(exporter.writer, ConsoleWriter)
        self.assertIsInstance(exporter.transform, NoTransform)
        exporter._clean_export_job()
    finally:
        # Always remove the persistence record, even if assertions fail.
        exporter.persistence.delete()
def run(args):
    """Build an exporter from parsed CLI arguments and execute it.

    Resuming from a persistence state takes precedence over loading a
    fresh configuration file. Configuration problems are logged rather
    than propagated.
    """
    try:
        if args.resume:
            exporter = BasicExporter.from_persistence_configuration(args.resume)
        else:
            exporter = BasicExporter.from_file_configuration(args.config)
    except ConfigurationError as exc:
        logging.error(exc)
    else:
        # Only export when construction succeeded without a config error.
        exporter.export()
def run(args):
    """Create a BasicExporter from the CLI arguments and run the export.

    A resume token selects the persistence-based constructor; otherwise
    the configuration file is used. ConfigurationError is logged, not
    re-raised.
    """
    try:
        # Pick the constructor and its argument based on whether we resume.
        if args.resume:
            build, source = BasicExporter.from_persistence_configuration, args.resume
        else:
            build, source = BasicExporter.from_file_configuration, args.config
        exporter = build(source)
    except ConfigurationError as err:
        logging.error(err)
    else:
        exporter.export()
def test_from_file_configuration(self):
    """Loading a JSON configuration file yields a BasicExporter."""
    exporter = BasicExporter.from_file_configuration('./tests/data/basic_config.json')
    try:
        self.assertIsInstance(exporter, BasicExporter)
        exporter._clean_export_job()
    finally:
        # Remove the persistence record regardless of test outcome.
        exporter.persistence.delete()
def test_from_file_configuration(self):
    """The from_file_configuration factory returns a BasicExporter."""
    manager = BasicExporter.from_file_configuration(
        './tests/data/basic_config.json')
    try:
        self.assertIsInstance(manager, BasicExporter)
        manager._clean_export_job()
    finally:
        # Clean up the persistence record even when the assertion fails.
        manager.persistence.delete()
def test_run_exporter_integration(self, mock_requests):
    """End-to-end run: random items, JQ transform, CloudSearch writer.

    Asserts that exporting 100 items produces a single batched POST to
    the CloudSearch document-batch endpoint.
    """
    # given:
    endpoint_url = "http://fake-domain.us-west-2.cloudsearch.amazonaws.com"
    config = {
        "label": "unittest",
        "reader": {
            "name": "exporters.readers.random_reader.RandomReader",
            "options": {
                "number_of_items": 100
            }
        },
        "transform": {
            "name": "exporters.transform.jq_transform.JQTransform",
            "options": {
                "jq_filter": ("{key: .key, country: .country_code, value: .value} |"
                              " del(.[] | select(. == null))")
            }
        },
        "writer": {
            "name": "exporters.writers.cloudsearch_writer.CloudSearchWriter",
            "options": {
                "endpoint_url": endpoint_url,
                "id_field": "key"
            }
        }
    }
    # Construct before the try block: if BasicExporter(config) raises, the
    # finally clause would otherwise hit a NameError on `exporter` and hide
    # the real failure.
    exporter = BasicExporter(config)
    try:
        # when:
        exporter.export()

        # then: the 100 items fit in one CloudSearch batch request
        self.assertEqual(1, len(mock_requests.post.mock_calls))
        url = endpoint_url + '/2013-01-01/documents/batch'
        mock_requests.post.assert_called_once_with(url, data=mock.ANY, headers=mock.ANY)
    finally:
        exporter.persistence.delete()
def test_bypass(self):
    """S3 -> Azure-file bypass: keys are copied without reading/writing items.

    Fixes: `assertEquals` is a long-deprecated alias removed in Python 3.12
    (use `assertEqual`), and the two assertion messages were swapped — the
    writer check is about items *written*, the reader check about items *read*.
    """
    # given:
    opts = create_s3_azure_file_bypass_simple_opts()

    # when:
    with moto.mock_s3(), mock.patch('azure.storage.file.FileService') as azure:
        s3_conn = boto.connect_s3()
        bucket = s3_conn.create_bucket(opts['reader']['options']['bucket'])
        keys = ['some_prefix/{}'.format(k) for k in ['some', 'keys', 'here']]
        create_s3_keys(bucket, keys)
        exporter = BasicExporter(opts)
        exporter.export()

        # then: the bypass short-circuits the pipeline, so neither the
        # reader nor the writer processes individual items.
        self.assertEqual(exporter.writer.get_metadata('items_count'), 0,
                         "No items should get written")
        self.assertEqual(exporter.reader.get_metadata('read_items'), 0,
                         "No items should be read")
        azure_puts = [
            call for call in azure.mock_calls if call[0] == '().copy_file'
        ]
        self.assertEqual(len(azure_puts), len(keys),
                         "all keys should be put into Azure files")