Code example #1
0
File: test_feedexport.py  Project: pengchant/scrapy
 def test_parse_credentials(self):
     """Check the priority order of S3FeedStorage credential sources.

     Expected precedence (highest first): credentials embedded in the
     URI, then explicit constructor arguments / crawler settings, and
     finally the boto configuration file (legacy fallback, which also
     emits a deprecation-style warning mentioning 'without AWS keys').
     """
     try:
         import boto  # noqa: F401
     except ImportError:
         raise unittest.SkipTest("S3FeedStorage requires boto")
     aws_credentials = {'AWS_ACCESS_KEY_ID': 'settings_key',
                        'AWS_SECRET_ACCESS_KEY': 'settings_secret'}
     crawler = get_crawler(settings_dict=aws_credentials)
     # Instantiate with crawler
     storage = S3FeedStorage.from_crawler(crawler,
                                          's3://mybucket/export.csv')
     self.assertEqual(storage.access_key, 'settings_key')
     self.assertEqual(storage.secret_key, 'settings_secret')
     # Instantiate directly
     storage = S3FeedStorage('s3://mybucket/export.csv',
                             aws_credentials['AWS_ACCESS_KEY_ID'],
                             aws_credentials['AWS_SECRET_ACCESS_KEY'])
     self.assertEqual(storage.access_key, 'settings_key')
     self.assertEqual(storage.secret_key, 'settings_secret')
     # URI priority > settings priority
     storage = S3FeedStorage('s3://uri_key:uri_secret@mybucket/export.csv',
                             aws_credentials['AWS_ACCESS_KEY_ID'],
                             aws_credentials['AWS_SECRET_ACCESS_KEY'])
     self.assertEqual(storage.access_key, 'uri_key')
     self.assertEqual(storage.secret_key, 'uri_secret')
     # Backward compatibility for initialising without settings
     with warnings.catch_warnings(record=True) as w:
         # Without 'always', a warning already raised earlier in this
         # process may be suppressed by the default filter, leaving
         # ``w`` empty and turning ``w[-1]`` into an IndexError.
         warnings.simplefilter('always')
         storage = S3FeedStorage('s3://mybucket/export.csv')
         self.assertEqual(storage.access_key, 'conf_key')
         self.assertEqual(storage.secret_key, 'conf_secret')
         self.assertTrue('without AWS keys' in str(w[-1].message))
Code example #2
0
File: test_feedexport.py  Project: ArturGaspar/scrapy
 def test_parse_credentials(self):
     """Credentials embedded in the URI override those passed
     explicitly, which in turn override the boto configuration file."""
     try:
         import boto  # noqa: F401
     except ImportError:
         raise unittest.SkipTest("S3FeedStorage requires boto")
     key, secret = 'settings_key', 'settings_secret'
     aws_credentials = {'AWS_ACCESS_KEY_ID': key,
                        'AWS_SECRET_ACCESS_KEY': secret}
     crawler = get_crawler(settings_dict=aws_credentials)
     # Credentials picked up from the crawler settings
     storage = S3FeedStorage.from_crawler(crawler,
                                          's3://mybucket/export.csv')
     self.assertEqual(storage.access_key, 'settings_key')
     self.assertEqual(storage.secret_key, 'settings_secret')
     # Credentials passed directly to the constructor
     storage = S3FeedStorage('s3://mybucket/export.csv', key, secret)
     self.assertEqual(storage.access_key, 'settings_key')
     self.assertEqual(storage.secret_key, 'settings_secret')
     # Credentials in the URI win over explicit arguments
     storage = S3FeedStorage('s3://uri_key:uri_secret@mybucket/export.csv',
                             key, secret)
     self.assertEqual(storage.access_key, 'uri_key')
     self.assertEqual(storage.secret_key, 'uri_secret')
     # Backwards compatibility for initialising without settings
     with warnings.catch_warnings(record=True) as w:
         storage = S3FeedStorage('s3://mybucket/export.csv')
         self.assertEqual(storage.access_key, 'conf_key')
         self.assertEqual(storage.secret_key, 'conf_secret')
         self.assertTrue('without AWS keys' in str(w[-1].message))
Code example #3
0
 def test_from_crawler_without_acl(self):
     """When only AWS credentials are configured, ``from_crawler``
     picks them up and leaves ``storage.acl`` at its default of None.
     """
     settings = {
         'AWS_ACCESS_KEY_ID': 'access_key',
         'AWS_SECRET_ACCESS_KEY': 'secret_key',
     }
     crawler = get_crawler(settings_dict=settings)
     storage = S3FeedStorage.from_crawler(crawler,
                                          's3://mybucket/export.csv')
     self.assertEqual(storage.access_key, 'access_key')
     self.assertEqual(storage.secret_key, 'secret_key')
     # assertIsNone checks identity (which is what we mean here) and
     # produces a clearer failure message than assertEqual(..., None).
     self.assertIsNone(storage.acl)
Code example #4
0
File: test_feedexport.py  Project: elacuesta/scrapy
 def test_from_crawler_without_acl(self):
     """With only AWS credentials in the settings, the storage built by
     ``from_crawler`` keeps ``acl`` unset (None)."""
     crawler = get_crawler(settings_dict={
         'AWS_ACCESS_KEY_ID': 'access_key',
         'AWS_SECRET_ACCESS_KEY': 'secret_key',
     })
     uri = 's3://mybucket/export.csv'
     storage = S3FeedStorage.from_crawler(crawler, uri)
     self.assertEqual(storage.access_key, 'access_key')
     self.assertEqual(storage.secret_key, 'secret_key')
     self.assertEqual(storage.acl, None)