def test_create_already_exists(self):
    from google.cloud.exceptions import Conflict
    from google.cloud.spanner_admin_database_v1 import Backup
    from google.cloud.spanner_admin_database_v1 import CreateBackupRequest

    client = _Client()
    api = client.database_admin_api = self._make_database_admin_api()
    api.create_backup.side_effect = Conflict("testing")

    instance = _Instance(self.INSTANCE_NAME, client=client)
    timestamp = self._make_timestamp()
    backup = self._make_one(
        self.BACKUP_ID, instance, database=self.DATABASE_NAME, expire_time=timestamp
    )
    backup_pb = Backup(
        database=self.DATABASE_NAME,
        expire_time=timestamp,
    )

    with self.assertRaises(Conflict):
        backup.create()

    request = CreateBackupRequest(
        parent=self.INSTANCE_NAME,
        backup_id=self.BACKUP_ID,
        backup=backup_pb,
    )
    api.create_backup.assert_called_once_with(
        request=request,
        metadata=[("google-cloud-resource-prefix", backup.name)],
    )

def snapshot_create(self, snapshot_path, subscription_path):
    """API call: create a snapshot

    See:
    https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create

    :type snapshot_path: str
    :param snapshot_path:
        fully-qualified path of the snapshot, in format
        ``projects/<PROJECT>/snapshots/<SNAPSHOT_NAME>``.

    :type subscription_path: str
    :param subscription_path:
        fully-qualified path of the subscription that the new snapshot
        captures, in format
        ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.

    :rtype: dict
    :returns: ``Snapshot`` resource returned from the API.
    :raises: :exc:`google.cloud.exceptions.Conflict` if the snapshot
        already exists
    :raises: :exc:`google.cloud.exceptions.NotFound` if the subscription
        does not exist
    """
    try:
        snapshot_pb = self._gax_api.create_snapshot(
            snapshot_path, subscription_path)
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            raise Conflict(snapshot_path)
        elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
            raise NotFound(subscription_path)
        raise
    return MessageToDict(snapshot_pb)

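# Hypothetical usage sketch (not from the original source): how a caller might
# handle the Conflict/NotFound translation performed by snapshot_create above.
# The `api` object and the two path arguments are assumed placeholders.
from google.cloud.exceptions import Conflict, NotFound

def ensure_snapshot(api, snapshot_path, subscription_path):
    """Create the snapshot, tolerating the case where it already exists."""
    try:
        return api.snapshot_create(snapshot_path, subscription_path)
    except Conflict:
        # The snapshot already exists; treat as success for idempotent setup.
        return None
    except NotFound:
        # The subscription backing the snapshot is missing; let this surface.
        raise
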
def sink_create(self, project, sink_name, filter_, destination):
    """API call: create a sink resource.

    See:
    https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create

    :type project: str
    :param project: ID of the project in which to create the sink.

    :type sink_name: str
    :param sink_name: the name of the sink

    :type filter_: str
    :param filter_: the advanced logs filter expression defining the
        entries exported by the sink.

    :type destination: str
    :param destination: destination URI for the entries exported by
        the sink.
    """
    options = None
    parent = 'projects/%s' % (project,)
    sink_pb = LogSink(name=sink_name, filter=filter_,
                      destination=destination)
    try:
        self._gax_api.create_sink(parent, sink_pb, options=options)
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            path = 'projects/%s/sinks/%s' % (project, sink_name)
            raise Conflict(path)
        raise

def test_create_bucket_w_conflict(self):
    from google.cloud.exceptions import Conflict

    project = "PROJECT"
    user_project = "USER_PROJECT"
    other_project = "OTHER_PROJECT"
    credentials = _make_credentials()
    client = self._make_one(project=project, credentials=credentials)
    connection = _make_connection()
    client._base_connection = connection
    connection.api_request.side_effect = Conflict("testing")

    bucket_name = "bucket-name"
    data = {"name": bucket_name}

    with self.assertRaises(Conflict):
        client.create_bucket(
            bucket_name, project=other_project, user_project=user_project
        )

    connection.api_request.assert_called_once_with(
        method="POST",
        path="/b",
        query_params={"project": other_project, "userProject": user_project},
        data=data,
        _target_object=mock.ANY,
        timeout=self._get_default_timeout(),
    )

def job_starter(job):
    try:
        job.begin()
    except Conflict as e:
        if not e.message.startswith('Already Exists'):
            raise Conflict(e.message)
        else:
            # do nothing, job already started; reload it and check that the
            # existing job matches the one we tried to start
            job_tmp = copy.deepcopy(job)
            job_reloader(job)
            if job.query != job_tmp.query or \
                    (job.destination is None and job_tmp.destination is not None) or \
                    (job.destination is not None and job_tmp.destination is None) or \
                    ((job.destination is not None and job_tmp.destination is not None) and
                     (job.destination.project != job_tmp.destination.project or
                      job.destination.dataset_name != job_tmp.destination.dataset_name or
                      job.destination.name != job_tmp.destination.name)):
                raise ValueError(
                    "Job {} already exists but with different properties than: "
                    "query ({}), dest. project ({}), dest. dataset ({}), dest. name ({})"
                    .format(
                        job_tmp.name,
                        job_tmp.query,
                        job_tmp.destination.project
                        if job_tmp.destination is not None else None,
                        job_tmp.destination.dataset_name
                        if job_tmp.destination is not None else None,
                        job_tmp.destination.name
                        if job_tmp.destination is not None else None))

def create(self): """Create this database within its instance Inclues any configured schema assigned to :attr:`ddl_statements`. See https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase """ api = self._instance._client.database_admin_api options = _options_with_prefix(self.name) db_name = self.database_id if '-' in db_name: db_name = '`%s`' % (db_name, ) try: future = api.create_database( parent=self._instance.name, create_statement='CREATE DATABASE %s' % (db_name, ), extra_statements=list(self._ddl_statements), options=options, ) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.ALREADY_EXISTS: raise Conflict(self.name) elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound('Instance not found: {name}'.format( name=self._instance.name, )) raise future.caller_metadata = {'request_type': 'CreateDatabase'} return future
def metric_create(self, project, metric_name, filter_, description):
    """API call: create a metric resource.

    See:
    https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.metrics/create

    :type project: str
    :param project: ID of the project in which to create the metric.

    :type metric_name: str
    :param metric_name: the name of the metric

    :type filter_: str
    :param filter_: the advanced logs filter expression defining the
        entries exported by the metric.

    :type description: str
    :param description: description of the metric.
    """
    options = None
    parent = 'projects/%s' % (project,)
    metric_pb = LogMetric(name=metric_name, filter=filter_,
                          description=description)
    try:
        self._gax_api.create_log_metric(parent, metric_pb, options=options)
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            path = 'projects/%s/metrics/%s' % (project, metric_name)
            raise Conflict(path)
        raise

def test_create_already_exists(self):
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.exceptions import Conflict

    client = _Client()
    api = client.database_admin_api = self._make_database_admin_api()
    api.create_backup.side_effect = Conflict("testing")

    instance = _Instance(self.INSTANCE_NAME, client=client)
    timestamp = self._make_timestamp()
    backup = self._make_one(
        self.BACKUP_ID, instance, database=self.DATABASE_NAME, expire_time=timestamp
    )
    backup_pb = {
        "database": self.DATABASE_NAME,
        "expire_time": _datetime_to_pb_timestamp(timestamp),
    }

    with self.assertRaises(Conflict):
        backup.create()

    api.create_backup.assert_called_once_with(
        parent=self.INSTANCE_NAME,
        backup_id=self.BACKUP_ID,
        backup=backup_pb,
        metadata=[("google-cloud-resource-prefix", backup.name)],
    )

def test_execute_no_force_rerun(self, mock_hook, mock_md5):
    job_id = "123456"
    hash_ = "hash"
    real_job_id = f"{job_id}_{hash_}"
    mock_md5.return_value.hexdigest.return_value = hash_

    configuration = {
        "query": {
            "query": "SELECT * FROM any",
            "useLegacySql": False,
        }
    }

    mock_hook.return_value.insert_job.return_value.result.side_effect = Conflict("any")
    job = MagicMock(
        job_id=real_job_id,
        error_result=False,
        state="DONE",
        done=lambda: True,
    )
    mock_hook.return_value.get_job.return_value = job

    op = BigQueryInsertJobOperator(
        task_id="insert_query_job",
        configuration=configuration,
        location=TEST_DATASET_LOCATION,
        job_id=job_id,
        project_id=TEST_GCP_PROJECT_ID,
        reattach_states={"PENDING"},
    )
    # No force rerun
    with pytest.raises(AirflowException):
        op.execute({})

def test_create_bucket_raises(mock_client):
    mock_client.create_bucket.side_effect = Conflict('error')
    gs = GCS(bucket='foo', gc_credentials_file='foo/bar')
    with pytest.raises(GCSDestinationError):
        gs.create_bucket()

def test_create_already_exists(self):
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.types import table
    from google.cloud.exceptions import Conflict

    client = _Client()
    api = client.table_admin_client = self._make_table_admin_client()
    api.create_backup.side_effect = Conflict("testing")

    timestamp = self._make_timestamp()
    backup = self._make_one(
        self.BACKUP_ID,
        _Instance(self.INSTANCE_NAME, client=client),
        table_id=self.TABLE_ID,
        expire_time=timestamp,
    )
    backup_pb = table.Backup(
        source_table=self.TABLE_NAME,
        expire_time=_datetime_to_pb_timestamp(timestamp),
    )

    with self.assertRaises(Conflict):
        backup.create(self.CLUSTER_ID)

    api.create_backup.assert_called_once_with(
        request={
            "parent": self.CLUSTER_NAME,
            "backup_id": self.BACKUP_ID,
            "backup": backup_pb,
        }
    )

def upload_file(client, bucket, local_path, remote_path, overwrite=False):
    """Upload a local file to a bucket.

    Raises :exc:`google.cloud.exceptions.Conflict` if the target object
    already exists and ``overwrite`` is False.
    """
    bucket = client.get_bucket(bucket)
    blob = storage.Blob(remote_path, bucket)
    if (not overwrite) and blob.exists():
        raise Conflict('File/object already exists on the bucket!')
    blob.upload_from_filename(local_path)

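# Hypothetical usage sketch (not part of the original snippet): calling
# upload_file and falling back to overwrite=True when the object already
# exists. The bucket name and paths are placeholders.
from google.cloud import storage
from google.cloud.exceptions import Conflict

def upload_with_fallback(local_path, remote_path, bucket_name):
    client = storage.Client()
    try:
        upload_file(client, bucket_name, local_path, remote_path)
    except Conflict:
        # Object exists; retry with overwrite enabled.
        upload_file(client, bucket_name, local_path, remote_path, overwrite=True)
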
def create_bucket(self, bucket_or_name, requester_pays=None, project=None):
    bucket = self._bucket_arg_to_bucket(bucket_or_name)
    # bucket.create(client=self, project=project)
    if bucket.name in self.backend.buckets.keys():
        raise Conflict(
            "409 POST https://storage.googleapis.com/storage/v1/b?project={}: "
            "You already own this bucket. Please select another name."
            .format(self.project))
    else:
        self.backend.buckets[bucket.name] = bucket
        return bucket

def test_exists_no_bucket_auto_create(self):
    # exists('') should automatically create the bucket when
    # auto_create_bucket is configured, and should return True when the
    # bucket already exists (create_bucket raises Conflict).
    self.storage.auto_create_bucket = True
    self.storage._client = mock.MagicMock()
    self.storage._client.create_bucket.side_effect = Conflict('dang')
    self.assertTrue(self.storage.exists(''))

def test_create_conflict(self):
    from google.cloud.exceptions import Conflict
    from google.cloud.runtimeconfig.config import Config

    conn = _Connection(Conflict("test"))
    client = _Client(project=self.PROJECT, connection=conn)
    config = Config(name=self.CONFIG_NAME, client=client)
    variable = config.variable(self.VARIABLE_NAME)
    variable.text = "foo"
    self.assertFalse(variable.create())

def api_request(self, **kw):
    from google.cloud.exceptions import Conflict
    from google.cloud.exceptions import NotFound

    self._called_with = kw
    if self._raise_conflict:
        raise Conflict('oops')
    try:
        response, self._responses = self._responses[0], self._responses[1:]
    except IndexError:
        raise NotFound('miss')
    return response

def sink_create(self, project, sink_name, filter_, destination,
                unique_writer_identity=False):
    """API call: create a sink resource.

    See
    https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create

    :type project: str
    :param project: ID of the project in which to create the sink.

    :type sink_name: str
    :param sink_name: the name of the sink

    :type filter_: str
    :param filter_: the advanced logs filter expression defining the
        entries exported by the sink.

    :type destination: str
    :param destination: destination URI for the entries exported by
        the sink.

    :type unique_writer_identity: bool
    :param unique_writer_identity: (Optional) determines the kind of
        IAM identity returned as writer_identity in the new sink.

    :rtype: dict
    :returns: The sink resource returned from the API (converted from a
        protobuf to a dictionary).
    """
    options = None
    parent = 'projects/%s' % (project,)
    sink_pb = LogSink(name=sink_name, filter=filter_,
                      destination=destination)
    try:
        created_pb = self._gax_api.create_sink(
            parent,
            sink_pb,
            unique_writer_identity=unique_writer_identity,
            options=options,
        )
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            path = 'projects/%s/sinks/%s' % (project, sink_name)
            raise Conflict(path)
        raise
    return MessageToDict(created_pb)

def subscription_create(self, subscription_path, topic_path,
                        ack_deadline=None, push_endpoint=None):
    """API call: create a subscription

    See:
    https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create

    :type subscription_path: str
    :param subscription_path:
        the fully-qualified path of the new subscription, in format
        ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.

    :type topic_path: str
    :param topic_path:
        the fully-qualified path of the topic being subscribed, in format
        ``projects/<PROJECT>/topics/<TOPIC_NAME>``.

    :type ack_deadline: int
    :param ack_deadline: (Optional) the deadline (in seconds) by which
        messages pulled from the back-end must be acknowledged.

    :type push_endpoint: str
    :param push_endpoint: (Optional) URL to which messages will be pushed
        by the back-end. If not set, the application must pull messages.

    :rtype: dict
    :returns: ``Subscription`` resource returned from the API.
    """
    if push_endpoint is not None:
        push_config = PushConfig(push_endpoint=push_endpoint)
    else:
        push_config = None

    if ack_deadline is None:
        ack_deadline = 0

    try:
        sub_pb = self._gax_api.create_subscription(
            subscription_path, topic_path,
            push_config=push_config, ack_deadline_seconds=ack_deadline)
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            raise Conflict(topic_path)
        raise
    return MessageToDict(sub_pb)

def test_execute_idempotency(self, mock_hook, mock_sleep_generator):
    job_id = "123456"

    configuration = {
        "query": {
            "query": "SELECT * FROM any",
            "useLegacySql": False,
        }
    }

    class MockJob:
        _call_no = 0
        _done = False

        def __init__(self):
            pass

        def reload(self):
            if MockJob._call_no == 3:
                MockJob._done = True
            else:
                MockJob._call_no += 1

        def done(self):
            return MockJob._done

        @property
        def job_id(self):
            return job_id

    mock_hook.return_value.insert_job.return_value.result.side_effect = Conflict("any")
    mock_sleep_generator.return_value = [0, 0, 0, 0, 0]
    mock_hook.return_value.get_job.return_value = MockJob()

    op = BigQueryInsertJobOperator(
        task_id="insert_query_job",
        configuration=configuration,
        location=TEST_DATASET_LOCATION,
        job_id=job_id,
        project_id=TEST_GCP_PROJECT_ID,
    )
    result = op.execute({})

    assert MockJob._call_no == 3

    mock_hook.return_value.get_job.assert_called_once_with(
        location=TEST_DATASET_LOCATION,
        job_id=job_id,
        project_id=TEST_GCP_PROJECT_ID,
    )

    assert result == job_id

def create(self): """Create this instance. See https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.instance.v1#google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance .. note:: Uses the ``project`` and ``instance_id`` on the current :class:`Instance` in addition to the ``display_name``. To change them before creating, reset the values via .. code:: python instance.display_name = 'New display name' instance.instance_id = 'i-changed-my-mind' before calling :meth:`create`. :rtype: :class:`google.api_core.operation.Operation` :returns: an operation instance :raises Conflict: if the instance already exists :raises GaxError: for errors other than ``ALREADY_EXISTS`` returned from the call """ api = self._client.instance_admin_api instance_pb = admin_v1_pb2.Instance( name=self.name, config=self.configuration_name, display_name=self.display_name, node_count=self.node_count, ) options = _options_with_prefix(self.name) try: future = api.create_instance( parent=self._client.project_name, instance_id=self.instance_id, instance=instance_pb, options=options, ) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.ALREADY_EXISTS: raise Conflict(self.name) raise return future
def test_execute_reattach(self, mock_hook, mock_md5):
    job_id = "123456"
    hash_ = "hash"
    real_job_id = f"{job_id}_{hash_}"
    mock_md5.return_value.hexdigest.return_value = hash_

    configuration = {
        "query": {
            "query": "SELECT * FROM any",
            "useLegacySql": False,
        }
    }

    mock_hook.return_value.insert_job.return_value.result.side_effect = Conflict("any")
    job = MagicMock(
        job_id=real_job_id,
        error_result=False,
        state="PENDING",
        done=lambda: False,
    )
    mock_hook.return_value.get_job.return_value = job

    op = BigQueryInsertJobOperator(
        task_id="insert_query_job",
        configuration=configuration,
        location=TEST_DATASET_LOCATION,
        job_id=job_id,
        project_id=TEST_GCP_PROJECT_ID,
        reattach_states={"PENDING"},
    )
    result = op.execute({})

    mock_hook.return_value.get_job.assert_called_once_with(
        location=TEST_DATASET_LOCATION,
        job_id=real_job_id,
        project_id=TEST_GCP_PROJECT_ID,
    )
    job.result.assert_called_once_with()

    assert result == real_job_id

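# Illustrative sketch (assumed and simplified, not the operator's actual
# implementation) of the reattach pattern the operator tests above exercise:
# submitting a job raises Conflict when the job id already exists, and the
# caller falls back to fetching the existing job and waiting on it if it is
# in an allowed state. `hook` is a placeholder with insert_job/get_job.
from google.cloud.exceptions import Conflict

def submit_or_reattach(hook, configuration, job_id, project_id, location,
                       reattach_states=frozenset({"PENDING", "RUNNING"})):
    try:
        job = hook.insert_job(configuration=configuration, job_id=job_id,
                              project_id=project_id, location=location)
        job.result()
        return job
    except Conflict:
        job = hook.get_job(job_id=job_id, project_id=project_id,
                           location=location)
        if job.state in reattach_states:
            job.result()  # wait for the pre-existing job to finish
            return job
        raise RuntimeError(
            f"Job {job_id} already exists in state {job.state}")
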
def commit(self, project, request_pb):
    """Perform a ``commit`` request.

    :type project: string
    :param project: The project to connect to. This is
                    usually your project name in the cloud console.

    :type request_pb: :class:`._generated.datastore_pb2.CommitRequest`
    :param request_pb: The request protobuf object.

    :rtype: :class:`._generated.datastore_pb2.CommitResponse`
    :returns: The returned protobuf response object.
    """
    request_pb.project_id = project
    try:
        return self._stub.Commit(request_pb)
    except GrpcRendezvous as exc:
        if exc.code() == StatusCode.ABORTED:
            raise Conflict(exc.details())
        raise

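# Hypothetical retry sketch (assumed helper, not from the source): because an
# ABORTED commit surfaces as Conflict in the wrapper above, a caller can retry
# the commit a bounded number of times with backoff. `datastore_api` is a
# placeholder for an object exposing the commit method shown above.
import time
from google.cloud.exceptions import Conflict

def commit_with_retry(datastore_api, project, request_pb, attempts=3):
    for attempt in range(attempts):
        try:
            return datastore_api.commit(project, request_pb)
        except Conflict:
            if attempt == attempts - 1:
                raise
            time.sleep(2 ** attempt * 0.1)  # simple exponential backoff
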
def topic_create(self, topic_path):
    """API call: create a topic

    See:
    https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create

    :type topic_path: str
    :param topic_path:
        fully-qualified path of the new topic, in format
        ``projects/<PROJECT>/topics/<TOPIC_NAME>``.

    :rtype: dict
    :returns: ``Topic`` resource returned from the API.
    :raises: :exc:`google.cloud.exceptions.Conflict` if the topic already
        exists
    """
    try:
        topic_pb = self._gax_api.create_topic(topic_path)
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            raise Conflict(topic_path)
        raise
    return {'name': topic_pb.name}

def create(self): """Create this database within its instance Inclues any configured schema assigned to :attr:`ddl_statements`. See https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase :rtype: :class:`~google.cloud.future.operation.Operation` :returns: a future used to poll the status of the create request :raises Conflict: if the database already exists :raises NotFound: if the instance owning the database does not exist :raises GaxError: for errors other than ``ALREADY_EXISTS`` returned from the call """ api = self._instance._client.database_admin_api options = _options_with_prefix(self.name) db_name = self.database_id if '-' in db_name: db_name = '`%s`' % (db_name,) try: future = api.create_database( parent=self._instance.name, create_statement='CREATE DATABASE %s' % (db_name,), extra_statements=list(self._ddl_statements), options=options, ) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.ALREADY_EXISTS: raise Conflict(self.name) elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound('Instance not found: {name}'.format( name=self._instance.name, )) raise return future
def subscription_create(self, subscription_path, topic_path,
                        ack_deadline=None, push_endpoint=None,
                        retain_acked_messages=None,
                        message_retention_duration=None):
    """API call: create a subscription

    See:
    https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create

    :type subscription_path: str
    :param subscription_path:
        the fully-qualified path of the new subscription, in format
        ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.

    :type topic_path: str
    :param topic_path:
        the fully-qualified path of the topic being subscribed, in format
        ``projects/<PROJECT>/topics/<TOPIC_NAME>``.

    :type ack_deadline: int
    :param ack_deadline: (Optional) the deadline (in seconds) by which
        messages pulled from the back-end must be acknowledged.

    :type push_endpoint: str
    :param push_endpoint: (Optional) URL to which messages will be pushed
        by the back-end. If not set, the application must pull messages.

    :type retain_acked_messages: bool
    :param retain_acked_messages: (Optional) Whether to retain acked
        messages. If set, acked messages are retained in the
        subscription's backlog for a duration indicated by
        ``message_retention_duration``.

    :type message_retention_duration: :class:`datetime.timedelta`
    :param message_retention_duration: (Optional) How long to retain
        messages in the subscription's backlog. If unset, defaults to
        7 days.

    :rtype: dict
    :returns: ``Subscription`` resource returned from the API.
    """
    if push_endpoint is not None:
        push_config = PushConfig(push_endpoint=push_endpoint)
    else:
        push_config = None

    if message_retention_duration is not None:
        message_retention_duration = _timedelta_to_duration_pb(
            message_retention_duration)

    try:
        sub_pb = self._gax_api.create_subscription(
            subscription_path, topic_path,
            push_config=push_config, ack_deadline_seconds=ack_deadline,
            retain_acked_messages=retain_acked_messages,
            message_retention_duration=message_retention_duration)
    except GaxError as exc:
        if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
            raise Conflict(topic_path)
        raise
    return MessageToDict(sub_pb)

def create_bucket(self, bucket_name: str) -> 'FakeGcsBucket':
    if bucket_name in self._namespace:
        raise Conflict('Bucket {} already exists.'.format(bucket_name))
    self._namespace[bucket_name] = {}
    return self._init_bucket(bucket_name)

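# Hypothetical test sketch showing how a fake like the one above could be
# exercised: creating the same bucket twice should raise Conflict. The
# `fake_gcs_client` fixture is an assumption; adapt to the actual fake's API.
import pytest
from google.cloud.exceptions import Conflict

def test_fake_create_bucket_conflict(fake_gcs_client):
    fake_gcs_client.create_bucket('demo-bucket')
    with pytest.raises(Conflict):
        fake_gcs_client.create_bucket('demo-bucket')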