Example 1
def _restart_on_unavailable(method,
                            request,
                            trace_name=None,
                            session=None,
                            attributes=None):
    """Restart iteration after :exc:`.ServiceUnavailable`.

    :type method: callable
    :param method: function returning iterator

    :type request: proto
    :param request: request proto to call the method with
    """
    resume_token = b""
    item_buffer = []
    with trace_call(trace_name, session, attributes):
        iterator = method(request=request)
    while True:
        try:
            for item in iterator:
                item_buffer.append(item)
                if item.resume_token:
                    resume_token = item.resume_token
                    break
        except ServiceUnavailable:
            del item_buffer[:]
            with trace_call(trace_name, session, attributes):
                request.resume_token = resume_token
                iterator = method(request=request)
            continue
        except InternalServerError as exc:
            resumable_error = any(resumable_message in exc.message
                                  for resumable_message in
                                  _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES)
            if not resumable_error:
                raise
            del item_buffer[:]
            with trace_call(trace_name, session, attributes):
                request.resume_token = resume_token
                iterator = method(request=request)
            continue

        if len(item_buffer) == 0:
            break

        for item in item_buffer:
            yield item

        del item_buffer[:]
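A hypothetical wiring of this private helper to a Spanner streaming RPC; `api`, `read_request`, `session`, and `process` are assumed names for illustration, not library API.

# Illustration only: the helper is module-private, and api, read_request,
# session and process are assumed names rather than library surface.
restartable = _restart_on_unavailable(
    method=api.streaming_read,           # SpannerClient streaming RPC
    request=read_request,                # ReadRequest proto carrying a resume_token field
    trace_name="CloudSpanner.ReadOnlyTransaction",
    session=session,
    attributes={"table_id": "Singers"},
)
for partial_result_set in restartable:   # items are PartialResultSet messages
    process(partial_result_set)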
Example 2
    def commit(self, return_commit_stats=False):
        """Commit mutations to the database.

        :type return_commit_stats: bool
        :param return_commit_stats:
          If true, the response will return commit stats which can be accessed through commit_stats.

        :rtype: datetime
        :returns: timestamp of the committed changes.
        """
        self._check_state()
        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        txn_options = TransactionOptions(
            read_write=TransactionOptions.ReadWrite())
        trace_attributes = {"num_mutations": len(self._mutations)}
        request = CommitRequest(
            session=self._session.name,
            mutations=self._mutations,
            single_use_transaction=txn_options,
            return_commit_stats=return_commit_stats,
        )
        with trace_call("CloudSpanner.Commit", self._session,
                        trace_attributes):
            response = api.commit(
                request=request,
                metadata=metadata,
            )
        self.committed = response.commit_timestamp
        self.commit_stats = response.commit_stats
        return self.committed
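For context, a minimal sketch of how this commit is usually reached through the public API; the instance and database IDs below are placeholders.

from google.cloud import spanner

client = spanner.Client()
database = client.instance("my-instance").database("my-database")

# The checkout calls Batch.commit() automatically on a clean exit.
with database.batch() as batch:
    batch.insert(
        table="Singers",
        columns=("SingerId", "FirstName"),
        values=[(1, "Marc")],
    )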
Example 3
    def begin(self):
        """Begin a transaction on the database.

        :rtype: bytes
        :returns: the ID for the newly-begun transaction.
        :raises ValueError:
            if the transaction is already begun, committed, or rolled back.
        """
        if self._transaction_id is not None:
            raise ValueError("Transaction already begun")

        if self.committed is not None:
            raise ValueError("Transaction already committed")

        if self.rolled_back:
            raise ValueError("Transaction is already rolled back")

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        txn_options = TransactionOptions(
            read_write=TransactionOptions.ReadWrite())
        with trace_call("CloudSpanner.BeginTransaction", self._session):
            response = api.begin_transaction(session=self._session.name,
                                             options=txn_options,
                                             metadata=metadata)
        self._transaction_id = response.id
        return self._transaction_id
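A lower-level usage sketch, assuming `database` is an existing Database handle; most applications reach begin/commit through database.run_in_transaction instead of managing the session and transaction by hand.

session = database.session()
session.create()
transaction = session.transaction()
txn_id = transaction.begin()   # bytes ID returned by BeginTransaction
transaction.insert(
    table="Singers", columns=("SingerId", "FirstName"), values=[(2, "Cat")]
)
transaction.commit()           # or transaction.rollback() to abandon the work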
Example 4
        def test_trace_call(self):
            extra_attributes = {
                "attribute1": "value1",
                # Since our database is mocked, we have to override the db.instance parameter so it is a string
                "db.instance": "database_name",
            }

            expected_attributes = {
                "db.type": "spanner",
                "db.url": "spanner.googleapis.com",
                "net.host.name": "spanner.googleapis.com",
            }
            expected_attributes.update(extra_attributes)

            with _opentelemetry_tracing.trace_call(
                "CloudSpanner.Test", _make_session(), extra_attributes
            ) as span:
                span.set_attribute("after_setup_attribute", 1)

            expected_attributes["after_setup_attribute"] = 1

            span_list = self.memory_exporter.get_finished_spans()
            self.assertEqual(len(span_list), 1)
            span = span_list[0]
            self.assertEqual(span.kind, trace_api.SpanKind.CLIENT)
            self.assertEqual(span.attributes, expected_attributes)
            self.assertEqual(span.name, "CloudSpanner.Test")
            self.assertEqual(span.status.canonical_code, StatusCanonicalCode.OK)
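A rough sketch of what a trace_call-style context manager does, assuming a recent OpenTelemetry API (Status/StatusCode rather than the older StatusCanonicalCode used in this test); it is not the library's exact implementation, which additionally yields None when OpenTelemetry is not installed (see the no-trace test further down).

from contextlib import contextmanager

from google.api_core.exceptions import GoogleAPICallError
from opentelemetry import trace
from opentelemetry.trace.status import Status, StatusCode


@contextmanager
def trace_call_sketch(name, session, extra_attributes=None):
    # Approximation only; attribute names mirror expected_attributes above.
    tracer = trace.get_tracer(__name__)
    attributes = {
        "db.type": "spanner",
        "db.url": "spanner.googleapis.com",
        "db.instance": session._database.name,
        "net.host.name": "spanner.googleapis.com",
    }
    if extra_attributes:
        attributes.update(extra_attributes)
    with tracer.start_as_current_span(
        name, kind=trace.SpanKind.CLIENT, attributes=attributes
    ) as span:
        try:
            yield span
        except GoogleAPICallError as error:
            span.set_status(Status(StatusCode.ERROR))
            span.record_exception(error)
            raise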
Example 5
        def test_trace_error(self):
            extra_attributes = {"db.instance": "database_name"}

            expected_attributes = {
                "db.type": "spanner",
                "db.url": "spanner.googleapis.com",
                "net.host.name": "spanner.googleapis.com",
            }
            expected_attributes.update(extra_attributes)

            with self.assertRaises(GoogleAPICallError):
                with _opentelemetry_tracing.trace_call(
                    "CloudSpanner.Test", _make_session(), extra_attributes
                ) as span:
                    from google.api_core.exceptions import InvalidArgument

                    raise _make_rpc_error(InvalidArgument)

            span_list = self.memory_exporter.get_finished_spans()
            self.assertEqual(len(span_list), 1)
            span = span_list[0]
            self.assertEqual(span.kind, trace_api.SpanKind.CLIENT)
            self.assertEqual(dict(span.attributes), expected_attributes)
            self.assertEqual(span.name, "CloudSpanner.Test")
            self.assertEqual(
                span.status.canonical_code, StatusCanonicalCode.INVALID_ARGUMENT
            )
Example 6
    def exists(self):
        """Test for the existence of this session.

        See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.Spanner.GetSession

        :rtype: bool
        :returns: True if the session exists on the back-end, else False.
        """
        if self._session_id is None:
            return False
        api = self._database.spanner_api
        metadata = _metadata_with_prefix(self._database.name)

        with trace_call("CloudSpanner.GetSession", self) as span:
            try:
                api.get_session(name=self.name, metadata=metadata)
                if span:
                    span.set_attribute("session_found", True)
            except NotFound:
                if span:
                    span.set_attribute("session_found", False)
                return False

        return True
Example 7
    def commit(self, return_commit_stats=False):
        """Commit mutations to the database.

        :type return_commit_stats: bool
        :param return_commit_stats:
          If true, the response will return commit stats which can be accessed through commit_stats.

        :rtype: datetime
        :returns: timestamp of the committed changes.
        :raises ValueError: if there are no mutations to commit.
        """
        self._check_state()

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        trace_attributes = {"num_mutations": len(self._mutations)}
        request = CommitRequest(
            session=self._session.name,
            mutations=self._mutations,
            transaction_id=self._transaction_id,
            return_commit_stats=return_commit_stats,
        )
        with trace_call("CloudSpanner.Commit", self._session,
                        trace_attributes):
            response = api.commit(
                request=request,
                metadata=metadata,
            )
        self.committed = response.commit_timestamp
        if return_commit_stats:
            self.commit_stats = response.commit_stats
        del self._session._transaction
        return self.committed
Example 8
    def begin(self):
        """Begin a read-only transaction on the database.

        :rtype: bytes
        :returns: the ID for the newly-begun transaction.

        :raises ValueError:
            if the transaction is already begun, committed, or rolled back.
        """
        if not self._multi_use:
            raise ValueError("Cannot call 'begin' on single-use snapshots")

        if self._transaction_id is not None:
            raise ValueError("Read-only transaction already begun")

        if self._read_request_count > 0:
            raise ValueError("Read-only transaction already pending")

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        txn_selector = self._make_txn_selector()
        with trace_call("CloudSpanner.BeginTransaction", self._session):
            response = api.begin_transaction(
                session=self._session.name,
                options=txn_selector.begin,
                metadata=metadata,
            )
        self._transaction_id = response.id
        return self._transaction_id
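Usage sketch, assuming `database` is an existing Database handle; as the guards above enforce, an explicit begin() only applies to a multi-use snapshot with no reads issued yet.

with database.snapshot(multi_use=True) as snapshot:
    snapshot.begin()  # pins a single read timestamp for every read below
    singers = list(snapshot.execute_sql("SELECT SingerId FROM Singers"))
    albums = list(snapshot.execute_sql("SELECT AlbumId FROM Albums"))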
Example 9
    def rollback(self):
        """Roll back a transaction on the database."""
        self._check_state()
        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        with trace_call("CloudSpanner.Rollback", self._session):
            api.rollback(self._session.name,
                         self._transaction_id,
                         metadata=metadata)
        self.rolled_back = True
        del self._session._transaction
Example 10
    def commit(self, return_commit_stats=False, request_options=None):
        """Commit mutations to the database.

        :type return_commit_stats: bool
        :param return_commit_stats:
          If true, the response will return commit stats which can be accessed through commit_stats.

        :type request_options:
            :class:`google.cloud.spanner_v1.types.RequestOptions`
        :param request_options:
                (Optional) Common options for this request.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.RequestOptions`.

        :rtype: datetime
        :returns: timestamp of the committed changes.
        :raises ValueError: if there are no mutations to commit.
        """
        self._check_state()

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        trace_attributes = {"num_mutations": len(self._mutations)}

        if request_options is None:
            request_options = RequestOptions()
        elif type(request_options) == dict:
            request_options = RequestOptions(request_options)
        if self.transaction_tag is not None:
            request_options.transaction_tag = self.transaction_tag

        # Request tags are not supported for commit requests.
        request_options.request_tag = None

        request = CommitRequest(
            session=self._session.name,
            mutations=self._mutations,
            transaction_id=self._transaction_id,
            return_commit_stats=return_commit_stats,
            request_options=request_options,
        )
        with trace_call("CloudSpanner.Commit", self._session, trace_attributes):
            response = api.commit(
                request=request,
                metadata=metadata,
            )
        self.committed = response.commit_timestamp
        if return_commit_stats:
            self.commit_stats = response.commit_stats
        del self._session._transaction
        return self.committed
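A hedged usage sketch: `transaction` is assumed to be an active read-write Transaction with buffered mutations, and request_options may be passed either as a RequestOptions message or as a dict of the same shape, as the coercion above shows.

from google.cloud.spanner_v1 import RequestOptions

committed_at = transaction.commit(
    return_commit_stats=True,
    request_options={"priority": RequestOptions.Priority.PRIORITY_MEDIUM},
)
print(transaction.commit_stats.mutation_count)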
Example 11
    def delete(self):
        """Delete this session.

        See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.Spanner.DeleteSession

        :raises ValueError: if :attr:`session_id` is not already set.
        :raises NotFound: if the session does not exist
        """
        if self._session_id is None:
            raise ValueError("Session ID not set by back-end")
        api = self._database.spanner_api
        metadata = _metadata_with_prefix(self._database.name)
        with trace_call("CloudSpanner.DeleteSession", self):
            api.delete_session(name=self.name, metadata=metadata)
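A small lifecycle sketch tying the session examples together, assuming `database` is an existing Database handle; in normal use sessions come from the database's session pool rather than being managed by hand.

session = database.session()
session.create()           # CloudSpanner.CreateSession span
assert session.exists()    # CloudSpanner.GetSession span, session_found=True
session.delete()           # CloudSpanner.DeleteSession span
assert not session.exists()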
Example 12
    def commit(self, return_commit_stats=False, request_options=None):
        """Commit mutations to the database.

        :type return_commit_stats: bool
        :param return_commit_stats:
          If true, the response will return commit stats which can be accessed through commit_stats.

        :type request_options:
            :class:`google.cloud.spanner_v1.types.RequestOptions`
        :param request_options:
                (Optional) Common options for this request.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.RequestOptions`.

        :rtype: datetime
        :returns: timestamp of the committed changes.
        """
        self._check_state()
        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        txn_options = TransactionOptions(
            read_write=TransactionOptions.ReadWrite())
        trace_attributes = {"num_mutations": len(self._mutations)}

        if type(request_options) == dict:
            request_options = RequestOptions(request_options)

        request = CommitRequest(
            session=self._session.name,
            mutations=self._mutations,
            single_use_transaction=txn_options,
            return_commit_stats=return_commit_stats,
            request_options=request_options,
        )
        with trace_call("CloudSpanner.Commit", self._session,
                        trace_attributes):
            response = api.commit(
                request=request,
                metadata=metadata,
            )
        self.committed = response.commit_timestamp
        self.commit_stats = response.commit_stats
        return self.committed
Example 13
        def test_trace_codeless_error(self):
            extra_attributes = {"db.instance": "database_name"}

            expected_attributes = {
                "db.type": "spanner",
                "db.url": "spanner.googleapis.com:443",
                "net.host.name": "spanner.googleapis.com:443",
            }
            expected_attributes.update(extra_attributes)

            with self.assertRaises(GoogleAPICallError):
                with _opentelemetry_tracing.trace_call(
                    "CloudSpanner.Test", _make_session(), extra_attributes
                ) as span:
                    raise GoogleAPICallError("error")

            span_list = self.memory_exporter.get_finished_spans()
            self.assertEqual(len(span_list), 1)
            span = span_list[0]
            self.assertEqual(span.status.canonical_code, StatusCanonicalCode.UNKNOWN)
Example 14
    def create(self):
        """Create this session, bound to its database.

        See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.Spanner.CreateSession

        :raises: :exc:`ValueError` if :attr:`session_id` is already set.
        """
        if self._session_id is not None:
            raise ValueError("Session ID already set by back-end")
        api = self._database.spanner_api
        metadata = _metadata_with_prefix(self._database.name)
        kw = {}
        if self._labels:
            kw = {"session": {"labels": self._labels}}

        with trace_call("CloudSpanner.CreateSession", self, self._labels):
            session_pb = api.create_session(self._database.name,
                                            metadata=metadata,
                                            **kw)
        self._session_id = session_pb.name.split("/")[-1]
Example 15
    def commit(self):
        """Commit mutations to the database.

        :rtype: datetime
        :returns: timestamp of the committed changes.
        """
        self._check_state()
        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        txn_options = TransactionOptions(read_write=TransactionOptions.ReadWrite())
        trace_attributes = {"num_mutations": len(self._mutations)}
        with trace_call("CloudSpanner.Commit", self._session, trace_attributes):
            response = api.commit(
                session=self._session.name,
                mutations=self._mutations,
                single_use_transaction=txn_options,
                metadata=metadata,
            )
        self.committed = response.commit_timestamp
        return self.committed
Example 16
    def create(self):
        """Create this session, bound to its database.

        See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.Spanner.CreateSession

        :raises ValueError: if :attr:`session_id` is already set.
        """
        if self._session_id is not None:
            raise ValueError("Session ID already set by back-end")
        api = self._database.spanner_api
        metadata = _metadata_with_prefix(self._database.name)

        request = CreateSessionRequest(database=self._database.name)

        if self._labels:
            request.session.labels = self._labels

        with trace_call("CloudSpanner.CreateSession", self, self._labels):
            session_pb = api.create_session(request=request, metadata=metadata,)
        self._session_id = session_pb.name.split("/")[-1]
Example 17
        def test_trace_grpc_error(self):
            extra_attributes = {"db.instance": "database_name"}

            expected_attributes = {
                "db.type": "spanner",
                "db.url": "spanner.googleapis.com:443",
                "net.host.name": "spanner.googleapis.com:443",
            }
            expected_attributes.update(extra_attributes)

            with self.assertRaises(GoogleAPICallError):
                with _opentelemetry_tracing.trace_call(
                    "CloudSpanner.Test", _make_session(), extra_attributes
                ) as span:
                    from google.api_core.exceptions import DataLoss

                    raise DataLoss("error")

            span_list = self.ot_exporter.get_finished_spans()
            self.assertEqual(len(span_list), 1)
            span = span_list[0]
            self.assertEqual(span.status.status_code, StatusCode.ERROR)
Example 18
    def commit(self):
        """Commit mutations to the database.

        :rtype: datetime
        :returns: timestamp of the committed changes.
        :raises ValueError: if there are no mutations to commit.
        """
        self._check_state()

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        trace_attributes = {"num_mutations": len(self._mutations)}
        with trace_call("CloudSpanner.Commit", self._session,
                        trace_attributes):
            response = api.commit(
                session=self._session.name,
                mutations=self._mutations,
                transaction_id=self._transaction_id,
                metadata=metadata,
            )
        self.committed = response.commit_timestamp
        del self._session._transaction
        return self.committed
Example 19
    def execute_update(
        self,
        dml,
        params=None,
        param_types=None,
        query_mode=None,
        query_options=None,
        request_options=None,
        *,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform an ``ExecuteSql`` API request with DML.

        :type dml: str
        :param dml: SQL DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`~google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan.
            See:
            `QueryMode <https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode>`_.

        :type query_options:
            :class:`~google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions`
            or :class:`dict`
        :param query_options: (Optional) Options that are provided for query plan stability.

        :type request_options:
            :class:`google.cloud.spanner_v1.types.RequestOptions`
        :param request_options:
                (Optional) Common options for this request.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.RequestOptions`.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        params_pb = self._make_params_pb(params, param_types)
        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        seqno, self._execute_sql_count = (
            self._execute_sql_count,
            self._execute_sql_count + 1,
        )

        # Query-level options have higher precedence than client-level and
        # environment-level options
        default_query_options = database._instance._client._query_options
        query_options = _merge_query_options(default_query_options,
                                             query_options)

        if type(request_options) == dict:
            request_options = RequestOptions(request_options)

        trace_attributes = {"db.statement": dml}

        request = ExecuteSqlRequest(
            session=self._session.name,
            sql=dml,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            query_mode=query_mode,
            query_options=query_options,
            seqno=seqno,
            request_options=request_options,
        )
        with trace_call("CloudSpanner.ReadWriteTransaction", self._session,
                        trace_attributes):
            response = api.execute_sql(request=request,
                                       metadata=metadata,
                                       retry=retry,
                                       timeout=timeout)
        return response.stats.row_count_exact
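Typical usage through database.run_in_transaction, assuming `database` is an existing Database handle; the table and column names are placeholders.

from google.cloud import spanner


def update_title(transaction):
    # param_types must be given explicitly whenever params are passed.
    return transaction.execute_update(
        "UPDATE Albums SET AlbumTitle = @title WHERE AlbumId = @album_id",
        params={"title": "Total Junk", "album_id": 1},
        param_types={
            "title": spanner.param_types.STRING,
            "album_id": spanner.param_types.INT64,
        },
    )


row_count = database.run_in_transaction(update_title)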
Example 20
    def partition_query(
        self,
        sql,
        params=None,
        param_types=None,
        partition_size_bytes=None,
        max_partitions=None,
        *,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform a ``PartitionQuery`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type partition_size_bytes: int
        :param partition_size_bytes:
            (Optional) desired size for each partition generated.  The service
            uses this as a hint, the actual partition size may differ.

        :type max_partitions: int
        :param max_partitions:
            (Optional) desired maximum number of partitions generated. The
            service uses this as a hint, the actual number of partitions may
            differ.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :rtype: iterable of bytes
        :returns: a sequence of partition tokens

        :raises ValueError:
            for single-use snapshots, or if a transaction ID is
            already associated with the snapshot.
        """
        if not self._multi_use:
            raise ValueError("Cannot use single-use snapshot.")

        if self._transaction_id is None:
            raise ValueError("Transaction not started.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for (key, value) in params.items()
            })
        else:
            params_pb = Struct()

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        partition_options = PartitionOptions(
            partition_size_bytes=partition_size_bytes,
            max_partitions=max_partitions)
        request = PartitionQueryRequest(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            partition_options=partition_options,
        )

        trace_attributes = {"db.statement": sql}
        with trace_call(
                "CloudSpanner.PartitionReadWriteTransaction",
                self._session,
                trace_attributes,
        ):
            response = api.partition_query(
                request=request,
                metadata=metadata,
                retry=retry,
                timeout=timeout,
            )

        return [partition.partition_token for partition in response.partitions]
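The usual caller of this RPC is a BatchSnapshot, sketched below under the assumption that `database` is an existing Database handle and `handle` stands in for application logic.

batch_snapshot = database.batch_snapshot()
try:
    for batch in batch_snapshot.generate_query_batches(
        "SELECT SingerId, FirstName FROM Singers"
    ):
        for row in batch_snapshot.process_query_batch(batch):
            handle(row)
finally:
    batch_snapshot.close()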
Example 21
    def batch_update(self, statements):
        """Perform a batch of DML statements via an ``ExecuteBatchDml`` request.

        :type statements:
            Sequence[Union[ str, Tuple[str, Dict[str, Any], Dict[str, Union[dict, .types.Type]]]]]

        :param statements:
            List of DML statements, with optional params / param types.
            If passed, 'params' is a dict mapping names to the values
            for parameter replacement.  Keys must match the names used in the
            corresponding DML statement.  If 'params' is passed, 'param_types'
            must also be passed, as a dict mapping names to the type of
            value passed in 'params'.

        :rtype:
            Tuple(status, Sequence[int])
        :returns:
            Status code, plus counts of rows affected by each completed DML
            statement.  Note that if the status code is not ``OK``, the
            statement triggering the error will not have an entry in the
            list, nor will any statements following that one.
        """
        parsed = []
        for statement in statements:
            if isinstance(statement, str):
                parsed.append({"sql": statement})
            else:
                dml, params, param_types = statement
                params_pb = self._make_params_pb(params, param_types)
                parsed.append({
                    "sql": dml,
                    "params": params_pb,
                    "param_types": param_types
                })

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        seqno, self._execute_sql_count = (
            self._execute_sql_count,
            self._execute_sql_count + 1,
        )

        trace_attributes = {
            # Get just the queries from the DML statement batch
            "db.statement":
            ";".join([statement["sql"] for statement in parsed])
        }
        with trace_call("CloudSpanner.DMLTransaction", self._session,
                        trace_attributes):
            response = api.execute_batch_dml(
                session=self._session.name,
                transaction=transaction,
                statements=parsed,
                seqno=seqno,
                metadata=metadata,
            )
        row_counts = [
            result_set.stats.row_count_exact
            for result_set in response.result_sets
        ]
        return response.status, row_counts
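Usage sketch showing the accepted statement shapes (a bare SQL string, or a (sql, params, param_types) tuple), assuming `database` is an existing Database handle.

from google.cloud import spanner


def insert_then_update(transaction):
    status, row_counts = transaction.batch_update(
        [
            "INSERT INTO Singers (SingerId, FirstName) VALUES (10, 'Ann')",
            (
                "UPDATE Singers SET FirstName = @name WHERE SingerId = @id",
                {"name": "Anne", "id": 10},
                {"name": spanner.param_types.STRING, "id": spanner.param_types.INT64},
            ),
        ]
    )
    return status, row_counts


database.run_in_transaction(insert_then_update)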
Example 22
    def partition_read(
        self,
        table,
        columns,
        keyset,
        index="",
        partition_size_bytes=None,
        max_partitions=None,
        *,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform a ``PartitionRead`` API request for rows in a table.

        :type table: str
        :param table: name of the table from which to fetch data

        :type columns: list of str
        :param columns: names of columns to be retrieved

        :type keyset: :class:`~google.cloud.spanner_v1.keyset.KeySet`
        :param keyset: keys / ranges identifying rows to be retrieved

        :type index: str
        :param index: (Optional) name of index to use, rather than the
                      table's primary key

        :type partition_size_bytes: int
        :param partition_size_bytes:
            (Optional) desired size for each partition generated.  The service
            uses this as a hint, the actual partition size may differ.

        :type max_partitions: int
        :param max_partitions:
            (Optional) desired maximum number of partitions generated. The
            service uses this as a hint, the actual number of partitions may
            differ.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :rtype: iterable of bytes
        :returns: a sequence of partition tokens

        :raises ValueError:
            for single-use snapshots, or if a transaction ID is
            already associated with the snapshot.
        """
        if not self._multi_use:
            raise ValueError("Cannot use single-use snapshot.")

        if self._transaction_id is None:
            raise ValueError("Transaction not started.")

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        partition_options = PartitionOptions(
            partition_size_bytes=partition_size_bytes,
            max_partitions=max_partitions)
        request = PartitionReadRequest(
            session=self._session.name,
            table=table,
            columns=columns,
            key_set=keyset._to_pb(),
            transaction=transaction,
            index=index,
            partition_options=partition_options,
        )

        trace_attributes = {"table_id": table, "columns": columns}
        with trace_call("CloudSpanner.PartitionReadOnlyTransaction",
                        self._session, trace_attributes):
            response = api.partition_read(
                request=request,
                metadata=metadata,
                retry=retry,
                timeout=timeout,
            )

        return [partition.partition_token for partition in response.partitions]
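Read-side counterpart of the query partitioning sketch above, under the same assumptions about `database` and `handle`.

from google.cloud import spanner

batch_snapshot = database.batch_snapshot()
try:
    for batch in batch_snapshot.generate_read_batches(
        table="Singers",
        columns=("SingerId", "FirstName"),
        keyset=spanner.KeySet(all_=True),
    ):
        for row in batch_snapshot.process_read_batch(batch):
            handle(row)
finally:
    batch_snapshot.close()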
Example 23
    def test_no_trace_call(self):
        with _opentelemetry_tracing.trace_call("Test", _make_session()) as no_span:
            self.assertIsNone(no_span)
Example 24
    def batch_update(self, statements, request_options=None):
        """Perform a batch of DML statements via an ``ExecuteBatchDml`` request.

        :type statements:
            Sequence[Union[ str, Tuple[str, Dict[str, Any], Dict[str, Union[dict, .types.Type]]]]]

        :param statements:
            List of DML statements, with optional params / param types.
            If passed, 'params' is a dict mapping names to the values
            for parameter replacement.  Keys must match the names used in the
            corresponding DML statement.  If 'params' is passed, 'param_types'
            must also be passed, as a dict mapping names to the type of
            value passed in 'params'.

        :type request_options:
            :class:`google.cloud.spanner_v1.types.RequestOptions`
        :param request_options:
                (Optional) Common options for this request.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.RequestOptions`.

        :rtype:
            Tuple(status, Sequence[int])
        :returns:
            Status code, plus counts of rows affected by each completed DML
            statement.  Note that if the status code is not ``OK``, the
            statement triggering the error will not have an entry in the
            list, nor will any statements following that one.
        """
        parsed = []
        for statement in statements:
            if isinstance(statement, str):
                parsed.append(ExecuteBatchDmlRequest.Statement(sql=statement))
            else:
                dml, params, param_types = statement
                params_pb = self._make_params_pb(params, param_types)
                parsed.append(
                    ExecuteBatchDmlRequest.Statement(sql=dml,
                                                     params=params_pb,
                                                     param_types=param_types))

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        seqno, self._execute_sql_count = (
            self._execute_sql_count,
            self._execute_sql_count + 1,
        )

        if type(request_options) == dict:
            request_options = RequestOptions(request_options)

        trace_attributes = {
            # Get just the queries from the DML statement batch
            "db.statement": ";".join([statement.sql for statement in parsed])
        }
        request = ExecuteBatchDmlRequest(
            session=self._session.name,
            transaction=transaction,
            statements=parsed,
            seqno=seqno,
            request_options=request_options,
        )
        with trace_call("CloudSpanner.DMLTransaction", self._session,
                        trace_attributes):
            response = api.execute_batch_dml(request=request,
                                             metadata=metadata)
        row_counts = [
            result_set.stats.row_count_exact
            for result_set in response.result_sets
        ]
        return response.status, row_counts
Example 25
    def execute_update(self,
                       dml,
                       params=None,
                       param_types=None,
                       query_mode=None,
                       query_options=None):
        """Perform an ``ExecuteSql`` API request with DML.

        :type dml: str
        :param dml: SQL DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`~google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan.
            See:
            `QueryMode <https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode>`_.

        :type query_options:
            :class:`~google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryOptions`
            or :class:`dict`
        :param query_options: (Optional) Options that are provided for query plan stability.

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        params_pb = self._make_params_pb(params, param_types)
        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        seqno, self._execute_sql_count = (
            self._execute_sql_count,
            self._execute_sql_count + 1,
        )

        # Query-level options have higher precedence than client-level and
        # environment-level options
        default_query_options = database._instance._client._query_options
        query_options = _merge_query_options(default_query_options,
                                             query_options)

        trace_attributes = {"db.statement": dml}
        with trace_call("CloudSpanner.ReadWriteTransaction", self._session,
                        trace_attributes):
            response = api.execute_sql(
                self._session.name,
                dml,
                transaction=transaction,
                params=params_pb,
                param_types=param_types,
                query_mode=query_mode,
                query_options=query_options,
                seqno=seqno,
                metadata=metadata,
            )
        return response.stats.row_count_exact