def __init__(self, bucket=None):
    """Bind the archive to a GCS bucket, creating and configuring it if absent."""
    super(GoogleStorageArchive, self).__init__(bucket)
    self.client = Client()
    log.info("Archive: gs://%s", bucket)
    # Look the bucket up first; only create it when it does not exist yet.
    found = self.client.lookup_bucket(bucket)
    if found is None:
        found = self.client.create_bucket(bucket)
    self.bucket = found
    # CORS rule so browsers can issue ranged GET requests against the bucket.
    cors_rule = {
        "origin": ['*'],
        "method": ['GET'],
        "responseHeader": [
            'Accept-Ranges',
            'Content-Encoding',
            'Content-Length',
            'Content-Range',
        ],
        "maxAgeSeconds": self.TIMEOUT,
    }
    self.bucket.cors = [cors_rule]
    self.bucket.update()
def test_as_context_mgr_wo_error(self):
    """A clean exit from the batch context flushes the queued requests."""
    from google.cloud.storage.client import Client

    URL = 'http://example.com/api'
    expected = _Response()
    expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
    http = _HTTP((expected, _THREE_PART_MIME_RESPONSE))
    client = Client(project='PROJECT', credentials=_make_credentials())
    client._http_internal = http

    self.assertEqual(list(client._batch_stack), [])

    targets = [_MockObject() for _ in range(3)]
    target1, target2, target3 = targets

    with self._make_one(client) as batch:
        self.assertEqual(list(client._batch_stack), [batch])
        batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
                            target_object=target1)
        batch._make_request('PATCH', URL, {'bar': 3},
                            target_object=target2)
        batch._make_request('DELETE', URL, target_object=target3)

    # Stack is popped on exit and all three requests were recorded in order.
    self.assertEqual(list(client._batch_stack), [])
    self.assertEqual(len(batch._requests), 3)
    for index, verb in enumerate(('POST', 'PATCH', 'DELETE')):
        self.assertEqual(batch._requests[index][0], verb)
    self.assertEqual(batch._target_objects, targets)
    self.assertEqual(target1._properties, {'foo': 1, 'bar': 2})
    self.assertEqual(target2._properties, {'foo': 1, 'bar': 3})
    self.assertEqual(target3._properties, '')
def test_as_context_mgr_w_error(self):
    """An exception inside the batch context aborts the batch: nothing is sent."""
    from google.cloud.storage.batch import _FutureDict
    from google.cloud.storage.client import Client

    URL = 'http://example.com/api'
    http = _make_requests_session([])
    connection = _Connection(http=http)
    client = Client(project='PROJECT', credentials=_make_credentials())
    client._base_connection = connection

    self.assertEqual(list(client._batch_stack), [])

    targets = [_MockObject() for _ in range(3)]
    target1, target2, target3 = targets

    try:
        with self._make_one(client) as batch:
            self.assertEqual(list(client._batch_stack), [batch])
            batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
                                target_object=target1)
            batch._make_request('PATCH', URL, {'bar': 3},
                                target_object=target2)
            batch._make_request('DELETE', URL, target_object=target3)
            raise ValueError()
    except ValueError:
        pass

    # Nothing went over the wire and the batch stack was unwound.
    http.request.assert_not_called()
    self.assertEqual(list(client._batch_stack), [])
    self.assertEqual(len(batch._requests), 3)
    self.assertEqual(batch._target_objects, targets)
    # Since the context manager fails, finish will not get called and
    # the _properties will still be futures.
    for target in targets:
        self.assertIsInstance(target._properties, _FutureDict)
def test_create_w_predefined_default_object_acl_valid(self):
    """create_bucket forwards ``predefinedDefaultObjectAcl`` as a query param."""
    from google.cloud.storage.client import Client

    PROJECT = "PROJECT"
    BUCKET_NAME = "bucket-name"
    DATA = {"name": BUCKET_NAME}
    # Pass explicit fake credentials so the test never falls back to the
    # ambient google.auth.default() lookup, matching the sibling tests.
    client = Client(project=PROJECT, credentials=_make_credentials())
    connection = _make_connection(DATA)
    client._base_connection = connection

    bucket = client.create_bucket(
        BUCKET_NAME, predefined_default_object_acl="publicRead")

    connection.api_request.assert_called_once_with(
        method="POST",
        path="/b",
        query_params={
            "project": PROJECT,
            "predefinedDefaultObjectAcl": "publicRead",
        },
        data=DATA,
        _target_object=bucket,
    )
def sign(duration: str, key_file: click.File, resource: str) -> None:
    """
    Generate a signed URL that embeds authentication data so the URL can be
    used by someone who does not have a Google account.

    This tool exists to overcome a shortcoming of gsutil signurl that limits
    expiration to 7 days only.

    KEY_FILE should be a path to a JSON file containing a service account
    private key. See gsutil signurl --help for details.

    RESOURCE is a GCS location in the form <bucket>/<path> (add neither
    "gs://" nor "http://...").

    Example: gcs-signurl /tmp/creds.json /foo-bucket/bar-file.txt
    """
    bucket_name, _, path = resource.lstrip("/").partition("/")
    creds = service_account.Credentials.from_service_account_file(key_file.name)
    till = datetime.now() + _DurationToTimeDelta(duration)

    # We never perform any API call with this client -- it exists only
    # because the public google-cloud-storage API requires building
    # client -> bucket -> blob. Silence the end-user-credentials warning
    # that constructing it may emit.
    message = "Your application has authenticated using end user credentials from Google Cloud SDK"
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=message)
        client = Client()
        blob = Blob(path, Bucket(client, bucket_name))

    # Not passing version argument - to support compatibility with
    # google-cloud-storage<=1.14.0. They default to version 2 and hopefully
    # will not change it anytime soon.
    click.echo(blob.generate_signed_url(expiration=till, credentials=creds))
def test_as_context_mgr_wo_error(self):
    """A clean exit from the batch context flushes all queued requests."""
    from google.cloud.storage.client import Client

    url = "http://example.com/api"
    expected_response = _make_response(
        content=_THREE_PART_MIME_RESPONSE,
        headers={"content-type": 'multipart/mixed; boundary="DEADBEEF="'},
    )
    http = _make_requests_session([expected_response])
    client = Client(project="PROJECT", credentials=_make_credentials())
    client._http_internal = http

    self.assertEqual(list(client._batch_stack), [])

    targets = [_MockObject() for _ in range(3)]
    target1, target2, target3 = targets

    with self._make_one(client) as batch:
        self.assertEqual(list(client._batch_stack), [batch])
        batch._make_request("POST", url, {"foo": 1, "bar": 2},
                            target_object=target1)
        batch._make_request("PATCH", url, {"bar": 3}, target_object=target2)
        batch._make_request("DELETE", url, target_object=target3)

    # Stack is popped on exit and the three requests were recorded in order.
    self.assertEqual(list(client._batch_stack), [])
    self.assertEqual(len(batch._requests), 3)
    for index, verb in enumerate(("POST", "PATCH", "DELETE")):
        self.assertEqual(batch._requests[index][0], verb)
    self.assertEqual(batch._target_objects, targets)
    self.assertEqual(target1._properties, {"foo": 1, "bar": 2})
    self.assertEqual(target2._properties, {"foo": 1, "bar": 3})
    self.assertEqual(target3._properties, b"")
def test_create_w_explicit_location(self):
    """create_bucket sends an explicit location and reflects it on the bucket."""
    from google.cloud.storage.client import Client

    PROJECT = "PROJECT"
    BUCKET_NAME = "bucket-name"
    LOCATION = "us-central1"
    DATA = {"location": LOCATION, "name": BUCKET_NAME}
    connection = _make_connection(
        DATA, "{'location': 'us-central1', 'name': 'bucket-name'}")
    # Pass explicit fake credentials so the test never falls back to the
    # ambient google.auth.default() lookup, matching the sibling tests.
    client = Client(project=PROJECT, credentials=_make_credentials())
    client._base_connection = connection

    bucket = client.create_bucket(BUCKET_NAME, location=LOCATION)

    connection.api_request.assert_called_once_with(
        method="POST",
        path="/b",
        data=DATA,
        _target_object=bucket,
        query_params={"project": "PROJECT"},
    )
    self.assertEqual(bucket.location, LOCATION)
def post_food_recommender(request):
    """Save a food recommender list and return its id.

    The JSON payload under ``data`` is stored either on the local filesystem
    (when ENV=local) or in a GCS bucket; in both cases the same success
    response is returned with the (possibly generated) ``listId``.
    """
    data = request.get_json()['data']
    # Reuse the caller-supplied id when present, otherwise mint a new one.
    list_id = request.args.get('listId') or str(uuid.uuid4())
    data['listId'] = list_id
    if os.getenv('ENV', 'production') == 'local':
        expected_path = (Path(tempfile.gettempdir()) /
                         'jmyrberg-food-recommender' / 'data' /
                         f'{list_id}.json')
        expected_path.parent.mkdir(exist_ok=True, parents=True)
        with open(expected_path, 'w') as f:
            json.dump(data, f)
    else:
        global storage_client
        # Lazily create the client once and cache it at module level.
        if not storage_client:
            storage_client = Client()
        bucket_name = os.getenv('FOOD_RECOMMENDER_BUCKET_NAME',
                                'jmyrberg-food-recommender')
        # NOTE(review): the leading "/" makes the GCS object name start with
        # a slash ("//data/..." in URLs). Kept for compatibility with
        # already-stored objects -- confirm before changing.
        blob_name = f'/data/{list_id}.json'
        new_blob = storage_client.bucket(bucket_name).blob(blob_name)
        new_blob.upload_from_string(json.dumps(data))
    # Both branches previously returned identical duplicated dicts; build the
    # response once.
    return {
        'status': 'success',
        'message': 'Food recommender list saved successfully',
        'data': {
            'listId': list_id
        }
    }, 200
def _make_one(self, project, credentials):
    """Construct the storage ``Client`` under test."""
    from google.cloud.storage.client import Client

    instance = Client(project=project, credentials=credentials)
    return instance
def __init__(self, *args, **kwargs):
    # Thin wrapper: only keyword arguments are forwarded to the storage
    # client constructor.
    # NOTE(review): positional *args are accepted but silently ignored --
    # confirm no caller relies on them before tightening the signature.
    self.client = Client(**kwargs)
def client(self):
    """Return the storage client, constructing and caching it on first access."""
    cached = self._client
    if cached is None:
        cached = Client(project=self.project_id,
                        credentials=self.credentials)
        self._client = cached
    return cached
def client(self) -> Client:
    """Lazily build the storage client and memoize it on the instance."""
    if self._client is None:
        self._client = Client()
    return self._client
def __init__(self, bucket_name):
    """Bind this instance to the named GCS bucket (no network call is made)."""
    client = Client()
    self.bucket = client.bucket(bucket_name)
def client(self):
    """Return this thread's storage client, creating one on first use."""
    # EAFP equivalent of the hasattr check: the attribute lives on a
    # per-thread storage object, so each thread gets its own client.
    try:
        return self.local.client
    except AttributeError:
        self.local.client = Client()
        return self.local.client
def get_gcs_blob(gcp_project, remote_prefix, remote_relative_path):
    """Resolve a remote prefix + relative path into a GCS ``Blob`` handle."""
    full_url = remote_prefix + remote_relative_path
    parts = urllib.parse.urlsplit(full_url)
    # Object name is the URL path without its leading "/".
    object_name = parts.path[1:]
    return Client(gcp_project).bucket(parts.netloc).blob(object_name)
def client():
    # Factory: build a fresh google-cloud-storage client on every call.
    return Client()