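# Hedged sketch of the imports these tests appear to rely on; the surrounding
# module is not shown in this excerpt, so treat the exact sources as assumptions.
# MockGCSClient and make_package are local test helpers, and GoogleCloudStorage
# and Package come from the package under test (import paths omitted here).
import json
import os
import re
import tempfile
import time
import unittest
from io import BytesIO

from mock import MagicMock, patch  # or unittest.mock, depending on the suite
from six.moves.urllib.parse import parse_qs, urlparse  # urllib.parse on Python 3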
class TestGoogleCloudStorage(unittest.TestCase):
    """Tests for storing packages in GoogleCloud"""

    def setUp(self):
        super(TestGoogleCloudStorage, self).setUp()
        self.gcs = MockGCSClient()
        # Write an empty JSON file to stand in for the GCP service account credentials
        self._config_file = tempfile.mktemp()
        with open(self._config_file, "w") as ofile:
            json.dump({}, ofile)
        # Replace the real GCS client with the mock so no network calls are made
        patch("google.cloud.storage.Client", self.gcs).start()
        self.settings = {
            "storage.bucket": "mybucket",
            "storage.gcp_service_account_json_filename": self._config_file,
        }
        self.bucket = self.gcs.bucket("mybucket")
        self.bucket._created = True
        patch.object(GoogleCloudStorage, "test", True).start()
        kwargs = GoogleCloudStorage.configure(self.settings)
        self.storage = GoogleCloudStorage(MagicMock(), **kwargs)

    def tearDown(self):
        super(TestGoogleCloudStorage, self).tearDown()
        patch.stopall()
        os.remove(self._config_file)

    def test_list(self):
        """Can construct a package from a GoogleCloudStorage Blob"""
        name, version, filename, summary = "mypkg", "1.2", "pkg.tar.gz", "text"
        blob = self.bucket.blob(name + "/" + filename)
        blob.metadata = {"name": name, "version": version, "summary": summary}
        blob.upload_from_string("foobar")
        package = list(self.storage.list(Package))[0]
        self.assertEqual(package.name, name)
        self.assertEqual(package.version, version)
        self.assertEqual(package.filename, filename)
        self.assertEqual(package.summary, summary)
        self.gcs.bucket.assert_called_with("mybucket")
        self.bucket.list_blobs.assert_called_with(prefix=None)
        self.assertEqual(self.bucket.create.call_count, 0)

    def test_get_url(self):
        """Mock GCS and test package URL generation"""
        package = make_package()
        response = self.storage.download_response(package)
        parts = urlparse(response.location)
        self.assertEqual(parts.scheme, "https")
        self.assertEqual(parts.hostname, "storage.googleapis.com")
        self.assertEqual(parts.path, "/mybucket/" + self.storage.get_path(package))
        query = parse_qs(parts.query)
        self.assertItemsEqual(query.keys(), ["Expires", "Signature", "GoogleAccessId"])
        self.assertTrue(int(query["Expires"][0]) > time.time())

    def test_delete(self):
        """delete() should remove package from storage"""
        package = make_package()
        self.storage.upload(package, BytesIO())
        self.storage.delete(package)
        keys = [blob.name for blob in self.bucket.list_blobs()]
        self.assertEqual(len(keys), 0)

    def test_upload(self):
        """Uploading package sets metadata and sends to GCS"""
        package = make_package(requires_python="3.6")
        datastr = b"foobar"
        data = BytesIO(datastr)
        self.storage.upload(package, data)
        blob = self.bucket.list_blobs()[0]
        blob.upload_from_file.assert_called_with(data, predefined_acl=None)
        self.assertEqual(blob._content, datastr)
        self.assertEqual(blob.metadata["name"], package.name)
        self.assertEqual(blob.metadata["version"], package.version)
        self.assertDictContainsSubset(package.get_metadata(), blob.metadata)
        self.assertEqual(self.bucket.create.call_count, 0)

    def test_upload_prepend_hash(self):
        """If prepend_hash = True, attach a hash to the file path"""
        self.storage.prepend_hash = True
        package = make_package()
        data = BytesIO()
        self.storage.upload(package, data)
        blob = self.bucket.list_blobs()[0]
        pattern = r"^[0-9a-f]{4}/%s/%s$" % (
            re.escape(package.name),
            re.escape(package.filename),
        )
        match = re.match(pattern, blob.name)
        self.assertIsNotNone(match)

    def test_create_bucket(self):
        """If GCS bucket doesn't exist, create it"""
        settings = {
            "storage.bucket": "new_bucket",
            "storage.region_name": "us-east-1",
            "storage.gcp_service_account_json_filename": self._config_file,
        }
        arguments = GoogleCloudStorage.configure(settings)
        arguments["bucket_factory"]()

        self.gcs.bucket.assert_called_with("new_bucket")
        bucket = self.gcs.bucket("new_bucket")
        bucket.create.assert_called_once_with()

    def test_object_acl(self):
        """Can specify an object ACL for GCS objects.

        Just test to make sure that the configured ACL is forwarded to the API
        client
        """
        settings = dict(self.settings)
        settings["storage.object_acl"] = "authenticated-read"
        kwargs = GoogleCloudStorage.configure(settings)
        storage = GoogleCloudStorage(MagicMock(), **kwargs)
        package = make_package()
        storage.upload(package, BytesIO())
        blob = self.bucket.list_blobs()[0]
        self.assertEqual(blob._acl, "authenticated-read")

    def test_storage_class(self):
        """Can specify a storage class for GCS objects"""
        settings = dict(self.settings)
        settings["storage.storage_class"] = "COLDLINE"
        kwargs = GoogleCloudStorage.configure(settings)
        storage = GoogleCloudStorage(MagicMock(), **kwargs)
        package = make_package()
        storage.upload(package, BytesIO())
        blob = self.bucket.list_blobs()[0]
        blob.update_storage_class.assert_called_with("COLDLINE")

    def test_client_without_credentials(self):
        """Can create a client without passing in application credentials"""
        kwargs = GoogleCloudStorage.configure(
            {"storage.bucket": "new_bucket", "storage.region_name": "us-east-1"}
        )
        GoogleCloudStorage(MagicMock(), **kwargs)