def test_copy_bypass_s3_with_env(self):
    """S3 bypass copies the source keys into dest_bucket, reading the AWS
    credentials for the reader from EXPORTERS_S3READER_* env vars."""
    # given
    self.s3_conn.create_bucket('dest_bucket')
    reader = {
        'name': 'exporters.readers.s3_reader.S3Reader',
        'options': {
            'bucket': 'source_bucket',
            'prefix': 'some_prefix/'
        }
    }
    options = create_s3_bypass_simple_config(reader=reader)
    env = {
        'EXPORTERS_S3READER_AWS_KEY': 'a',
        'EXPORTERS_S3READER_AWS_SECRET': 'b'
    }

    # when:
    with closing(S3Bypass(options, meta())) as bypass:
        with environment(env):
            bypass.execute()

    # then:
    bucket = self.s3_conn.get_bucket('dest_bucket')
    key = next(iter(bucket.list('some_prefix/')))
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual('some_prefix/test_key', key.name)
    self.assertEqual(self.data, json.loads(key.get_contents_as_string()))
    self.assertEqual(bypass.total_items, len(self.data),
                     'Bypass got an incorrect number of total items')
def test_copy_bypass_s3_with_env(self):
    """S3 bypass copies the source keys into dest_bucket, reading the AWS
    credentials for the reader from EXPORTERS_S3READER_* env vars."""
    # given
    self.s3_conn.create_bucket('dest_bucket')
    reader = {
        'name': 'exporters.readers.s3_reader.S3Reader',
        'options': {
            'bucket': 'source_bucket',
            'prefix': 'some_prefix/'
        }
    }
    options = create_s3_bypass_simple_config(reader=reader)
    env = {
        'EXPORTERS_S3READER_AWS_KEY': 'a',
        'EXPORTERS_S3READER_AWS_SECRET': 'b'
    }

    # when:
    with closing(S3Bypass(options, meta())) as bypass:
        with environment(env):
            bypass.execute()

    # then:
    bucket = self.s3_conn.get_bucket('dest_bucket')
    key = next(iter(bucket.list('some_prefix/')))
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual('some_prefix/test_key', key.name)
    self.assertEqual(self.data, json.loads(key.get_contents_as_string()))
    self.assertEqual(
        bypass.total_items, len(self.data),
        'Bypass got an incorrect number of total items')
def test_failed_dump_in_scrapy_cloud(self, mock_ses):
    """A failed-job notification sent from inside Scrapy Cloud includes the
    job key, the dash job URL, the stacktrace and the export configuration."""
    with environment(dict(SHUB_JOBKEY='10804/1/12')):
        self.notifier.notify_failed_job(
            'REASON', 'STACKTRACE', ['*****@*****.**'])
        # Assemble the expected mail body separately for readability.
        expected_body = (
            u'\nExport job failed with following error:\n\n'
            u'REASON\n\n'
            u'Job key: 10804/1/12\n'
            u'Job: https://dash.scrapinghub.com/p/10804/job/1/12\n\n'
            u'Stacktrace:\nSTACKTRACE\n\n'
            u'Configuration:\n' + json.dumps(self.options)
        )
        mock_ses.return_value.send_email.assert_called_once_with(
            _MAIL_FROM,
            'Failed export job for Customer',
            expected_body,
            mock.ANY)
def test_failed_dump_in_scrapy_cloud(self, mock_ses):
    """A failed-job notification sent from inside Scrapy Cloud includes the
    job key, the dash job URL, the stacktrace and the export configuration."""
    with environment(dict(SHUB_JOBKEY='10804/1/12')):
        self.notifier.notify_failed_job(
            'REASON', 'STACKTRACE', ['*****@*****.**'])
        # Assemble the expected mail body separately for readability.
        expected_body = (
            u'\nExport job failed with following error:\n\n'
            u'REASON\n\n'
            u'Job key: 10804/1/12\n'
            u'Job: https://dash.scrapinghub.com/p/10804/job/1/12\n\n'
            u'Stacktrace:\nSTACKTRACE\n\n'
            u'Configuration:\n' + json.dumps(self.options)
        )
        mock_ses.return_value.send_email.assert_called_once_with(
            DEFAULT_MAIN_FROM,
            'Failed export job for Customer',
            expected_body,
            mock.ANY)
def test_load_from_env(self):
    """read_option falls back to the EXPORTERS_S3READER_AWS_KEY env var
    while it is set, and yields None once the variable is gone."""
    # given
    reader_config = {
        'name': 'exporters.readers.s3_reader.S3Reader',
        'options': {
            'bucket': 'source_bucket',
            'prefix': 'some_prefix/'
        }
    }
    options = create_s3_bypass_simple_config(reader=reader_config)

    # when:
    bypass = S3Bypass(options, meta())

    # then
    env_value = '123'
    with environment({'EXPORTERS_S3READER_AWS_KEY': env_value}):
        read_back = bypass.read_option('reader', 'aws_access_key_id')
        self.assertEqual(read_back, env_value)
    # Outside the managed environment the variable is unset again.
    self.assertIsNone(bypass.read_option('reader', 'aws_access_key_id'))
def test_load_from_env(self):
    """read_option falls back to the EXPORTERS_S3READER_AWS_KEY env var
    while it is set, and yields None once the variable is gone."""
    # given
    reader_config = {
        'name': 'exporters.readers.s3_reader.S3Reader',
        'options': {
            'bucket': 'source_bucket',
            'prefix': 'some_prefix/'
        }
    }
    options = create_s3_bypass_simple_config(reader=reader_config)

    # when:
    bypass = S3Bypass(options, meta())

    # then
    env_value = '123'
    with environment({'EXPORTERS_S3READER_AWS_KEY': env_value}):
        read_back = bypass.read_option('reader', 'aws_access_key_id')
        self.assertEqual(read_back, env_value)
    # Outside the managed environment the variable is unset again.
    self.assertIsNone(bypass.read_option('reader', 'aws_access_key_id'))