コード例 #1
0
 def test_persist(self):
     """Round-trip a small binary file through S3FilesStore and verify
     the stored object's content, checksum, metadata and headers.

     Skips unless the S3_TEST_FILE_URI environment variable points at a
     writable S3 location.
     """
     assert_aws_environ()
     uri = os.environ.get('S3_TEST_FILE_URI')
     if not uri:
         raise unittest.SkipTest("No S3 URI available for testing")
     payload = b"TestS3FilesStore: \xe2\x98\x83"
     stream = BytesIO(payload)
     file_meta = {'foo': 'bar'}
     key_path = ''
     store = S3FilesStore(uri)
     yield store.persist_file(key_path, stream, info=None,
                              meta=file_meta,
                              headers={'Content-Type': 'image/png'})
     stats = yield store.stat_file(key_path, info=None)
     for field in ('last_modified', 'checksum'):
         self.assertIn(field, stats)
     self.assertEqual(stats['checksum'],
                      '3187896a9657a28163abb31667df64c8')
     parsed = urlparse(uri)
     stored, key = get_s3_content_and_delete(
         parsed.hostname, parsed.path[1:], with_key=True)
     self.assertEqual(stored, payload)
     if is_botocore():
         # botocore path: key is a plain dict of response fields
         self.assertEqual(key['Metadata'], {'foo': 'bar'})
         self.assertEqual(key['CacheControl'],
                          S3FilesStore.HEADERS['Cache-Control'])
         self.assertEqual(key['ContentType'], 'image/png')
     else:
         # boto path: the same data is exposed as Key attributes
         self.assertEqual(key.metadata, {'foo': 'bar'})
         self.assertEqual(key.cache_control,
                          S3FilesStore.HEADERS['Cache-Control'])
         self.assertEqual(key.content_type, 'image/png')
コード例 #2
0
ファイル: test_pipeline_files.py プロジェクト: zmunk/scrapy
 def test_persist(self):
     """Store a blob via S3FilesStore.persist_file and assert that the
     key on S3 carries the expected content, checksum, metadata and
     Content-Type/Cache-Control headers.
     """
     assert_aws_environ()
     uri = os.environ.get('S3_TEST_FILE_URI')
     if not uri:
         raise unittest.SkipTest("No S3 URI available for testing")
     blob = b"TestS3FilesStore: \xe2\x98\x83"
     store = S3FilesStore(uri)
     yield store.persist_file(
         '', BytesIO(blob), info=None, meta={'foo': 'bar'},
         headers={'Content-Type': 'image/png'})
     stat = yield store.stat_file('', info=None)
     self.assertIn('last_modified', stat)
     self.assertIn('checksum', stat)
     self.assertEqual(stat['checksum'], '3187896a9657a28163abb31667df64c8')
     parts = urlparse(uri)
     content, key = get_s3_content_and_delete(
         parts.hostname, parts.path[1:], with_key=True)
     self.assertEqual(content, blob)
     expected_cache = S3FilesStore.HEADERS['Cache-Control']
     if is_botocore():
         # botocore: response fields arrive as dict entries
         self.assertEqual(key['Metadata'], {'foo': 'bar'})
         self.assertEqual(key['CacheControl'], expected_cache)
         self.assertEqual(key['ContentType'], 'image/png')
     else:
         # boto: same fields as attributes on the Key object
         self.assertEqual(key.metadata, {'foo': 'bar'})
         self.assertEqual(key.cache_control, expected_cache)
         self.assertEqual(key.content_type, 'image/png')
コード例 #3
0
ファイル: test_pipeline_files.py プロジェクト: lopuhin/scrapy
 def test_persist(self):
     """Exercise S3FilesStore.persist_file/stat_file against a live
     bucket and verify what actually landed on S3.
     """
     assert_aws_environ()
     uri = os.environ.get("S3_TEST_FILE_URI")
     if not uri:
         raise unittest.SkipTest("No S3 URI available for testing")
     raw = b"TestS3FilesStore: \xe2\x98\x83"
     meta = {"foo": "bar"}
     target = ""
     store = S3FilesStore(uri)
     yield store.persist_file(target, BytesIO(raw), info=None,
                              meta=meta,
                              headers={"Content-Type": "image/png"})
     stat_info = yield store.stat_file(target, info=None)
     self.assertIn("last_modified", stat_info)
     self.assertIn("checksum", stat_info)
     self.assertEqual(stat_info["checksum"],
                      "3187896a9657a28163abb31667df64c8")
     loc = urlparse(uri)
     content, key = get_s3_content_and_delete(loc.hostname,
                                              loc.path[1:],
                                              with_key=True)
     self.assertEqual(content, raw)
     # Collect (actual, expected) pairs for whichever S3 client is in
     # use, then assert them uniformly.
     if is_botocore():
         checks = [
             (key["Metadata"], {"foo": "bar"}),
             (key["CacheControl"], S3FilesStore.HEADERS["Cache-Control"]),
             (key["ContentType"], "image/png"),
         ]
     else:
         checks = [
             (key.metadata, {"foo": "bar"}),
             (key.cache_control, S3FilesStore.HEADERS["Cache-Control"]),
             (key.content_type, "image/png"),
         ]
     for actual, expected in checks:
         self.assertEqual(actual, expected)
コード例 #4
0
ファイル: test_feedexport.py プロジェクト: CPoirot3/scrapy
 def test_store(self):
     """Write feed content through S3FeedStorage and confirm the bytes
     retrieved from S3 match what was written.
     """
     assert_aws_environ()
     uri = os.environ.get('S3_TEST_FILE_URI')
     if not uri:
         raise unittest.SkipTest("No S3 URI available for testing")
     storage = S3FeedStorage(uri)
     verifyObject(IFeedStorage, storage)
     expected_content = b"content: \xe2\x98\x83"
     # NOTE: renamed from `file` to avoid shadowing the builtin
     feed_file = storage.open(scrapy.Spider("default"))
     feed_file.write(expected_content)
     yield storage.store(feed_file)
     parsed = urlparse(uri)
     stored = get_s3_content_and_delete(parsed.hostname, parsed.path[1:])
     self.assertEqual(stored, expected_content)
コード例 #5
0
 def test_store(self):
     """S3FeedStorage must upload exactly the bytes written to the file
     object returned by open().
     """
     assert_aws_environ()
     uri = os.environ.get('S3_TEST_FILE_URI')
     if not uri:
         raise unittest.SkipTest("No S3 URI available for testing")
     storage = S3FeedStorage(uri)
     verifyObject(IFeedStorage, storage)
     spider = scrapy.Spider("default")
     out = storage.open(spider)
     payload = b"content: \xe2\x98\x83"
     out.write(payload)
     yield storage.store(out)
     u = urlparse(uri)
     self.assertEqual(
         get_s3_content_and_delete(u.hostname, u.path[1:]),
         payload)