def _make_params_pb(params, param_types):
        """Helper for :meth:`execute_update`.

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :rtype: Union[None, :class:`Struct`]
        :returns: a struct message for the passed params, or None
        :raises ValueError:
            If ``param_types`` is None but ``params`` is not None.
        :raises ValueError:
            If ``params`` is None but ``param_types`` is not None.
        """
        if params is not None:
            if param_types is None:
                raise ValueError("Specify 'param_types' when passing 'params'.")
            return Struct(
                fields={key: _make_value_pb(value) for key, value in params.items()}
            )
        else:
            if param_types is not None:
                raise ValueError("Specify 'params' when passing 'param_types'.")

        return None
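
The helper simply wraps the parameter dict in a protobuf ``Struct`` whose values are built with ``_make_value_pb``. A minimal sketch of the equivalent construction, using made-up parameter names and values (not taken from the library source):

# Hypothetical sketch; the parameter name, value, and type are made up.
from google.protobuf.struct_pb2 import Struct
from google.cloud.spanner_v1 import param_types
from google.cloud.spanner_v1._helpers import _make_value_pb

params = {"max_age": 30}
types = {"max_age": param_types.INT64}

# _make_params_pb(params, types) is expected to be equivalent to:
expected = Struct(
    fields={key: _make_value_pb(value) for key, value in params.items()}
)
# Passing 'params' without 'param_types' (or vice versa) raises ValueError.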
Example 2
    def execute_sql(self, sql, params=None, param_types=None, query_mode=None):
        """Perform an ``ExecuteStreamingSql`` API request for rows in a table.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan. See
            https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode

        :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
        :returns: a result set instance which can be used to consume rows.
        :raises ValueError:
            for reuse of single-use snapshots, or if a transaction ID is
            already pending for multiple-use snapshots.
        """
        if self._read_request_count > 0:
            if not self._multi_use:
                raise ValueError("Cannot re-use single-use snapshot.")
            if self._transaction_id is None:
                raise ValueError("Transaction ID pending.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value) for key, value in params.items()})
        else:
            params_pb = None

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        restart = functools.partial(
            api.execute_streaming_sql,
            self._session.name, sql,
            transaction=transaction, params=params_pb, param_types=param_types,
            query_mode=query_mode, metadata=metadata)

        iterator = _restart_on_unavailable(restart)

        self._read_request_count += 1

        if self._multi_use:
            return StreamedResultSet(iterator, source=self)
        else:
            return StreamedResultSet(iterator)
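
A hedged usage sketch of ``execute_sql`` with query parameters follows; ``database`` is assumed to be an existing ``Database`` instance, and the table, columns, and values are made up:

# Hypothetical usage sketch; 'database', the table, and the values are made up.
from google.cloud.spanner_v1 import param_types

with database.snapshot() as snapshot:
    results = snapshot.execute_sql(
        "SELECT first_name, age FROM contacts WHERE age >= @min_age",
        params={"min_age": 30},
        param_types={"min_age": param_types.INT64},
    )
    for row in results:
        print(row)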
Example 3
    def _make_params_pb(params, param_types):
        """Helper for :meth:`execute_update`.

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :rtype: Union[dict, :class:`Struct`]
        :returns: a struct message for the passed params, or an empty dict
        :raises ValueError:
            If ``param_types`` is None but ``params`` is not None.
        :raises ValueError:
            If ``params`` is None but ``param_types`` is not None.
        """
        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            return Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            if param_types is not None:
                raise ValueError(
                    "Specify 'params' when passing 'param_types'.")

        return {}
Example 4
    def execute_partitioned_dml(self, dml, params=None, param_types=None):
        """Execute a partitionable DML statement.

        :type dml: str
        :param dml: DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            params_pb = None

        api = self.spanner_api

        txn_options = TransactionOptions(
            partitioned_dml=TransactionOptions.PartitionedDml())

        metadata = _metadata_with_prefix(self.name)

        with SessionCheckout(self._pool) as session:

            txn = api.begin_transaction(session.name,
                                        txn_options,
                                        metadata=metadata)

            txn_selector = TransactionSelector(id=txn.id)

            restart = functools.partial(
                api.execute_streaming_sql,
                session.name,
                dml,
                transaction=txn_selector,
                params=params_pb,
                param_types=param_types,
                metadata=metadata,
            )

            iterator = _restart_on_unavailable(restart)

            result_set = StreamedResultSet(iterator)
            list(result_set)  # consume all partials

            return result_set.stats.row_count_lower_bound
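
A hedged usage sketch for ``execute_partitioned_dml``; ``database`` is assumed to be an existing ``Database`` instance and the statement is made up. Note the return value is only a lower bound on the affected row count:

# Hypothetical usage sketch; 'database' and the DML statement are made up.
from google.cloud.spanner_v1 import param_types

row_count = database.execute_partitioned_dml(
    "UPDATE contacts SET active = FALSE WHERE age > @max_age",
    params={"max_age": 90},
    param_types={"max_age": param_types.INT64},
)
print("rows affected (lower bound):", row_count)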
Example 5
    def _execute_update_helper(self, count=0, query_options=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1 import (
            ResultSet,
            ResultSetStats,
        )
        from google.cloud.spanner_v1 import TransactionSelector
        from google.cloud.spanner_v1._helpers import (
            _make_value_pb,
            _merge_query_options,
        )
        from google.cloud.spanner_v1 import ExecuteSqlRequest

        MODE = 2  # PROFILE
        stats_pb = ResultSetStats(row_count_exact=1)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_sql.return_value = ResultSet(stats=stats_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction._execute_sql_count = count

        row_count = transaction.execute_update(
            DML_QUERY_WITH_PARAM,
            PARAMS,
            PARAM_TYPES,
            query_mode=MODE,
            query_options=query_options,
        )

        self.assertEqual(row_count, 1)

        expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        expected_query_options = database._instance._client._query_options
        if query_options:
            expected_query_options = _merge_query_options(
                expected_query_options, query_options
            )

        expected_request = ExecuteSqlRequest(
            session=self.SESSION_NAME,
            sql=DML_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            query_options=expected_query_options,
            seqno=count,
        )
        api.execute_sql.assert_called_once_with(
            request=expected_request,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertEqual(transaction._execute_sql_count, count + 1)
    def _make_result_set_stats(query_plan=None, **kw):
        from google.cloud.spanner_v1.proto.result_set_pb2 import ResultSetStats
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1._helpers import _make_value_pb

        query_stats = Struct(
            fields={key: _make_value_pb(value) for key, value in kw.items()}
        )
        return ResultSetStats(query_plan=query_plan, query_stats=query_stats)
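
A short, hypothetical example of how this test fixture might be called (the stat names and values are invented); each keyword argument becomes an entry in the ``query_stats`` struct:

# Hypothetical call; the stat names/values are made up.
stats_pb = _make_result_set_stats(rows_returned="2", elapsed_time="1.22 secs")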
Example 7
    def execute_partitioned_dml(self, dml, params=None, param_types=None):
        """Execute a partitionable DML statement.

        :type dml: str
        :param dml: DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        if params is not None:
            if param_types is None:
                raise ValueError("Specify 'param_types' when passing 'params'.")
            params_pb = Struct(
                fields={key: _make_value_pb(value) for key, value in params.items()}
            )
        else:
            params_pb = None

        api = self.spanner_api

        txn_options = TransactionOptions(
            partitioned_dml=TransactionOptions.PartitionedDml()
        )

        metadata = _metadata_with_prefix(self.name)

        with SessionCheckout(self._pool) as session:

            txn = api.begin_transaction(session.name, txn_options, metadata=metadata)

            txn_selector = TransactionSelector(id=txn.id)

            restart = functools.partial(
                api.execute_streaming_sql,
                session.name,
                dml,
                transaction=txn_selector,
                params=params_pb,
                param_types=param_types,
                metadata=metadata,
            )

            iterator = _restart_on_unavailable(restart)

            result_set = StreamedResultSet(iterator)
            list(result_set)  # consume all partials

            return result_set.stats.row_count_lower_bound
Example 8
    def _make_result_set_stats(query_plan=None, **kw):
        from google.cloud.spanner_v1.proto.result_set_pb2 import ResultSetStats
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1._helpers import _make_value_pb

        query_stats = Struct(
            fields={key: _make_value_pb(value)
                    for key, value in kw.items()})
        return ResultSetStats(query_plan=query_plan, query_stats=query_stats)
    def _partition_query_helper(
            self, multi_use, w_txn, size=None, max_partitions=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.types import Partition
        from google.cloud.spanner_v1.types import PartitionOptions
        from google.cloud.spanner_v1.types import PartitionResponse
        from google.cloud.spanner_v1.types import Transaction
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1._helpers import _make_value_pb

        new_txn_id = b'ABECAB91'
        token_1 = b'FACE0FFF'
        token_2 = b'BADE8CAF'
        response = PartitionResponse(
            partitions=[
                Partition(partition_token=token_1),
                Partition(partition_token=token_2),
            ],
            transaction=Transaction(id=new_txn_id),
        )
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _partition_query_response=response)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        if w_txn:
            derived._transaction_id = TXN_ID

        tokens = list(derived.partition_query(
            SQL_QUERY_WITH_PARAM, PARAMS, PARAM_TYPES,
            partition_size_bytes=size,
            max_partitions=max_partitions,
        ))

        self.assertEqual(tokens, [token_1, token_2])

        (r_session, sql, transaction, params, param_types,
         partition_options, metadata) = api._partition_query_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(sql, SQL_QUERY_WITH_PARAM)
        self.assertIsInstance(transaction, TransactionSelector)
        self.assertEqual(transaction.id, TXN_ID)
        self.assertFalse(transaction.HasField('begin'))
        expected_params = Struct(fields={
            key: _make_value_pb(value) for (key, value) in PARAMS.items()})
        self.assertEqual(params, expected_params)
        self.assertEqual(param_types, PARAM_TYPES)
        self.assertEqual(
            partition_options,
            PartitionOptions(
                partition_size_bytes=size, max_partitions=max_partitions))
        self.assertEqual(
            metadata, [('google-cloud-resource-prefix', database.name)])
Example 10
    def execute_update(self,
                       dml,
                       params=None,
                       param_types=None,
                       query_mode=None):
        """Perform an ``ExecuteSql`` API request with DML.

        :type dml: str
        :param dml: SQL DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan. See
            https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            params_pb = None

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        response = api.execute_sql(
            self._session.name,
            dml,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            query_mode=query_mode,
            seqno=self._execute_sql_count,
            metadata=metadata,
        )

        self._execute_sql_count += 1
        return response.stats.row_count_exact
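
A hedged usage sketch for ``execute_update``: since it must run inside a read-write transaction, the usual entry point is ``database.run_in_transaction``. ``database`` and the DML statement are made up:

# Hypothetical usage sketch; 'database' and the DML statement are made up.
from google.cloud.spanner_v1 import param_types

def _delete_inactive(transaction):
    row_count = transaction.execute_update(
        "DELETE FROM contacts WHERE active = @active",
        params={"active": False},
        param_types={"active": param_types.BOOL},
    )
    print("rows affected:", row_count)

database.run_in_transaction(_delete_inactive)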
    def test__make_params_pb_w_params_w_param_types(self):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1._helpers import _make_value_pb

        session = _Session()
        transaction = self._make_one(session)

        params_pb = transaction._make_params_pb(PARAMS, PARAM_TYPES)

        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )
        self.assertEqual(params_pb, expected_params)
Example 13
    def execute_update(self, dml, params=None, param_types=None, query_mode=None):
        """Perform an ``ExecuteSql`` API request with DML.

        :type dml: str
        :param dml: SQL DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan. See
            https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        if params is not None:
            if param_types is None:
                raise ValueError("Specify 'param_types' when passing 'params'.")
            params_pb = Struct(
                fields={key: _make_value_pb(value) for key, value in params.items()}
            )
        else:
            params_pb = None

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        response = api.execute_sql(
            self._session.name,
            dml,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            query_mode=query_mode,
            seqno=self._execute_sql_count,
            metadata=metadata,
        )

        self._execute_sql_count += 1
        return response.stats.row_count_exact
Example 14
    def _execute_update_helper(self, count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            ResultSet, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1._helpers import _make_value_pb

        MODE = 2  # PROFILE
        stats_pb = ResultSetStats(row_count_exact=1)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_sql.return_value = ResultSet(stats=stats_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction._execute_sql_count = count

        row_count = transaction.execute_update(DML_QUERY_WITH_PARAM,
                                               PARAMS,
                                               PARAM_TYPES,
                                               query_mode=MODE)

        self.assertEqual(row_count, 1)

        expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })

        api.execute_sql.assert_called_once_with(
            self.SESSION_NAME,
            DML_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            seqno=count,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )

        self.assertEqual(transaction._execute_sql_count, count + 1)
    def _execute_update_helper(self, count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            ResultSet,
            ResultSetStats,
        )
        from google.cloud.spanner_v1.proto.transaction_pb2 import TransactionSelector
        from google.cloud.spanner_v1._helpers import _make_value_pb

        MODE = 2  # PROFILE
        stats_pb = ResultSetStats(row_count_exact=1)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_sql.return_value = ResultSet(stats=stats_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction._execute_sql_count = count

        row_count = transaction.execute_update(
            DML_QUERY_WITH_PARAM, PARAMS, PARAM_TYPES, query_mode=MODE
        )

        self.assertEqual(row_count, 1)

        expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        api.execute_sql.assert_called_once_with(
            self.SESSION_NAME,
            DML_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            seqno=count,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertEqual(transaction._execute_sql_count, count + 1)
Example 16
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1 import (
            PartialResultSet,
            ResultSetMetadata,
            ResultSetStats,
        )
        from google.cloud.spanner_v1 import (
            TransactionSelector,
            TransactionOptions,
        )
        from google.cloud.spanner_v1 import ReadRequest
        from google.cloud.spanner_v1 import Type, StructType
        from google.cloud.spanner_v1 import TypeCode
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [[u"bharney", 31], [u"phred", 32]]
        struct_type_pb = StructType(
            fields=[
                StructType.Field(name="name", type_=Type(code=TypeCode.STRING)),
                StructType.Field(name="age", type_=Type(code=TypeCode.INT64)),
            ]
        )
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
        )
        result_sets = [
            PartialResultSet(metadata=metadata_pb),
            PartialResultSet(stats=stats_pb),
        ]
        for i in range(len(result_sets)):
            result_sets[i].values.extend(VALUES[i])
        KEYS = [["*****@*****.**"], ["*****@*****.**"]]
        keyset = KeySet(keys=KEYS)
        INDEX = "email-address-index"
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.streaming_read.return_value = _MockIterator(*result_sets)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(
                TABLE_NAME, COLUMNS, keyset, index=INDEX, partition=partition
            )
        else:
            result_set = derived.read(
                TABLE_NAME, COLUMNS, keyset, index=INDEX, limit=LIMIT
            )

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(strong=True)
        )

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        if partition is not None:
            expected_limit = 0
        else:
            expected_limit = LIMIT

        expected_request = ReadRequest(
            session=self.SESSION_NAME,
            table=TABLE_NAME,
            columns=COLUMNS,
            key_set=keyset._to_pb(),
            transaction=expected_transaction,
            index=INDEX,
            limit=expected_limit,
            partition_token=partition,
        )
        api.streaming_read.assert_called_once_with(
            request=expected_request,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertSpanAttributes(
            "CloudSpanner.ReadOnlyTransaction",
            attributes=dict(
                BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS)
            ),
        )
Example 17
    def _partition_query_helper(self, multi_use, w_txn, size=None, max_partitions=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1 import Partition
        from google.cloud.spanner_v1 import PartitionOptions
        from google.cloud.spanner_v1 import PartitionQueryRequest
        from google.cloud.spanner_v1 import PartitionResponse
        from google.cloud.spanner_v1 import Transaction
        from google.cloud.spanner_v1 import TransactionSelector
        from google.cloud.spanner_v1._helpers import _make_value_pb

        new_txn_id = b"ABECAB91"
        token_1 = b"FACE0FFF"
        token_2 = b"BADE8CAF"
        response = PartitionResponse(
            partitions=[
                Partition(partition_token=token_1),
                Partition(partition_token=token_2),
            ],
            transaction=Transaction(id=new_txn_id),
        )
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.partition_query.return_value = response
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        if w_txn:
            derived._transaction_id = TXN_ID

        tokens = list(
            derived.partition_query(
                SQL_QUERY_WITH_PARAM,
                PARAMS,
                PARAM_TYPES,
                partition_size_bytes=size,
                max_partitions=max_partitions,
            )
        )

        self.assertEqual(tokens, [token_1, token_2])

        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        expected_txn_selector = TransactionSelector(id=TXN_ID)

        expected_partition_options = PartitionOptions(
            partition_size_bytes=size, max_partitions=max_partitions
        )

        expected_request = PartitionQueryRequest(
            session=self.SESSION_NAME,
            sql=SQL_QUERY_WITH_PARAM,
            transaction=expected_txn_selector,
            params=expected_params,
            param_types=PARAM_TYPES,
            partition_options=expected_partition_options,
        )
        api.partition_query.assert_called_once_with(
            request=expected_request,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertSpanAttributes(
            "CloudSpanner.PartitionReadWriteTransaction",
            status=StatusCanonicalCode.OK,
            attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}),
        )
Example 18
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector, TransactionOptions)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [
            [u'bharney', 31],
            [u'phred', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        struct_type_pb = StructType(fields=[
            StructType.Field(name='name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        KEYS = [['*****@*****.**'], ['*****@*****.**']]
        keyset = KeySet(keys=KEYS)
        INDEX = 'email-address-index'
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.streaming_read.return_value = _MockIterator(*result_sets)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      partition=partition)
        else:
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      limit=LIMIT)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(read_only=TransactionOptions.ReadOnly(
            strong=True))

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        if partition is not None:
            expected_limit = 0
        else:
            expected_limit = LIMIT

        api.streaming_read.assert_called_once_with(
            self.SESSION_NAME,
            TABLE_NAME,
            COLUMNS,
            keyset._to_pb(),
            transaction=expected_transaction,
            index=INDEX,
            limit=expected_limit,
            partition_token=partition,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )
Example 19
    def execute_sql(
        self,
        sql,
        params=None,
        param_types=None,
        query_mode=None,
        query_options=None,
        request_options=None,
        partition=None,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform an ``ExecuteStreamingSql`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`~google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan.
            See:
            `QueryMode <https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode>`_.

        :type query_options:
            :class:`~google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions`
                or :class:`dict`
        :param query_options:
                (Optional) Query optimizer configuration to use for the given query.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.QueryOptions`

        :type request_options:
            :class:`google.cloud.spanner_v1.types.RequestOptions`
        :param request_options:
                (Optional) Common options for this request.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.RequestOptions`.

        :type partition: bytes
        :param partition: (Optional) one of the partition tokens returned
                          from :meth:`partition_query`.

        :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
        :returns: a result set instance which can be used to consume rows.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :raises ValueError:
            for reuse of single-use snapshots, or if a transaction ID is
            already pending for multiple-use snapshots.
        """
        if self._read_request_count > 0:
            if not self._multi_use:
                raise ValueError("Cannot re-use single-use snapshot.")
            if self._transaction_id is None:
                raise ValueError("Transaction ID pending.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            params_pb = {}

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        # Query-level options have higher precedence than client-level and
        # environment-level options
        default_query_options = database._instance._client._query_options
        query_options = _merge_query_options(default_query_options,
                                             query_options)

        if type(request_options) == dict:
            request_options = RequestOptions(request_options)

        request = ExecuteSqlRequest(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            query_mode=query_mode,
            partition_token=partition,
            seqno=self._execute_sql_count,
            query_options=query_options,
            request_options=request_options,
        )
        restart = functools.partial(
            api.execute_streaming_sql,
            request=request,
            metadata=metadata,
            retry=retry,
            timeout=timeout,
        )

        trace_attributes = {"db.statement": sql}
        iterator = _restart_on_unavailable(
            restart,
            request,
            "CloudSpanner.ReadWriteTransaction",
            self._session,
            trace_attributes,
        )

        self._read_request_count += 1
        self._execute_sql_count += 1

        if self._multi_use:
            return StreamedResultSet(iterator, source=self)
        else:
            return StreamedResultSet(iterator)
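
A hedged usage sketch of this newer signature, exercising ``query_options``; ``database`` is assumed to exist and the query and optimizer version are made up:

# Hypothetical usage sketch; 'database', the query, and the optimizer version
# are made up.
from google.cloud.spanner_v1 import ExecuteSqlRequest, param_types

with database.snapshot() as snapshot:
    results = snapshot.execute_sql(
        "SELECT VenueId, VenueName FROM Venues WHERE Capacity > @capacity",
        params={"capacity": 100},
        param_types={"capacity": param_types.INT64},
        query_options=ExecuteSqlRequest.QueryOptions(optimizer_version="1"),
    )
    rows = list(results)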
Example 20
    def _make_value(value):
        from google.cloud.spanner_v1._helpers import _make_value_pb

        return _make_value_pb(value)
    def _partition_query_helper(self,
                                multi_use,
                                w_txn,
                                size=None,
                                max_partitions=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.types import Partition
        from google.cloud.spanner_v1.types import PartitionOptions
        from google.cloud.spanner_v1.types import PartitionResponse
        from google.cloud.spanner_v1.types import Transaction
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1._helpers import _make_value_pb

        txn_id = b'DEADBEEF'
        new_txn_id = b'ABECAB91'
        token_1 = b'FACE0FFF'
        token_2 = b'BADE8CAF'
        response = PartitionResponse(
            partitions=[
                Partition(partition_token=token_1),
                Partition(partition_token=token_2),
            ],
            transaction=Transaction(id=new_txn_id),
        )
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _partition_query_response=response)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        if w_txn:
            derived._transaction_id = txn_id

        tokens = list(
            derived.partition_query(
                SQL_QUERY_WITH_PARAM,
                PARAMS,
                PARAM_TYPES,
                partition_size_bytes=size,
                max_partitions=max_partitions,
            ))

        self.assertEqual(tokens, [token_1, token_2])

        (r_session, sql, transaction, params, param_types, partition_options,
         metadata) = api._partition_query_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(sql, SQL_QUERY_WITH_PARAM)
        self.assertIsInstance(transaction, TransactionSelector)
        self.assertEqual(transaction.id, txn_id)
        self.assertFalse(transaction.HasField('begin'))
        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })
        self.assertEqual(params, expected_params)
        self.assertEqual(param_types, PARAM_TYPES)
        self.assertEqual(
            partition_options,
            PartitionOptions(partition_size_bytes=size,
                             max_partitions=max_partitions))
        self.assertEqual(metadata,
                         [('google-cloud-resource-prefix', database.name)])
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        txn_id = b'DEADBEEF'
        VALUES = [
            [u'bharney', 31],
            [u'phred', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        struct_type_pb = StructType(fields=[
            StructType.Field(name='name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        KEYS = ['*****@*****.**', '*****@*****.**']
        keyset = KeySet(keys=KEYS)
        INDEX = 'email-address-index'
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _streaming_read_response=_MockIterator(*result_sets))
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = txn_id

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      partition=partition)
        else:
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      limit=LIMIT)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, table, columns, key_set, transaction, index, limit,
         resume_token, r_partition, metadata) = api._streaming_read_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(table, TABLE_NAME)
        self.assertEqual(columns, COLUMNS)
        self.assertEqual(key_set, keyset._to_pb())
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, txn_id)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        self.assertEqual(index, INDEX)
        if partition is not None:
            self.assertEqual(limit, 0)
            self.assertEqual(r_partition, partition)
        else:
            self.assertEqual(limit, LIMIT)
            self.assertIsNone(r_partition)
        self.assertEqual(resume_token, b'')
        self.assertEqual(metadata,
                         [('google-cloud-resource-prefix', database.name)])
    def execute_sql(
        self,
        sql,
        params=None,
        param_types=None,
        query_mode=None,
        partition=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
    ):
        """Perform an ``ExecuteStreamingSql`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan. See
            https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode

        :type partition: bytes
        :param partition: (Optional) one of the partition tokens returned
                          from :meth:`partition_query`.

        :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
        :returns: a result set instance which can be used to consume rows.

        :raises ValueError:
            for reuse of single-use snapshots, or if a transaction ID is
            already pending for multiple-use snapshots.
        """
        if self._read_request_count > 0:
            if not self._multi_use:
                raise ValueError("Cannot re-use single-use snapshot.")
            if self._transaction_id is None:
                raise ValueError("Transaction ID pending.")

        if params is not None:
            if param_types is None:
                raise ValueError("Specify 'param_types' when passing 'params'.")
            params_pb = Struct(
                fields={key: _make_value_pb(value) for key, value in params.items()}
            )
        else:
            params_pb = None

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        restart = functools.partial(
            api.execute_streaming_sql,
            self._session.name,
            sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            query_mode=query_mode,
            partition_token=partition,
            seqno=self._execute_sql_count,
            metadata=metadata,
            retry=retry,
            timeout=timeout,
        )

        iterator = _restart_on_unavailable(restart)

        self._read_request_count += 1
        self._execute_sql_count += 1

        if self._multi_use:
            return StreamedResultSet(iterator, source=self)
        else:
            return StreamedResultSet(iterator)
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [
            [u'bharney', 31],
            [u'phred', 32],
        ]
        VALUE_PBS = [
            [_make_value_pb(item) for item in row]
            for row in VALUES
        ]
        struct_type_pb = StructType(fields=[
            StructType.Field(name='name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        KEYS = ['*****@*****.**', '*****@*****.**']
        keyset = KeySet(keys=KEYS)
        INDEX = 'email-address-index'
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _streaming_read_response=_MockIterator(*result_sets))
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(
                TABLE_NAME, COLUMNS, keyset,
                index=INDEX, partition=partition)
        else:
            result_set = derived.read(
                TABLE_NAME, COLUMNS, keyset,
                index=INDEX, limit=LIMIT)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, table, columns, key_set, transaction, index, limit,
         resume_token, r_partition, metadata) = api._streaming_read_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(table, TABLE_NAME)
        self.assertEqual(columns, COLUMNS)
        self.assertEqual(key_set, keyset._to_pb())
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, TXN_ID)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        self.assertEqual(index, INDEX)
        if partition is not None:
            self.assertEqual(limit, 0)
            self.assertEqual(r_partition, partition)
        else:
            self.assertEqual(limit, LIMIT)
            self.assertIsNone(r_partition)
        self.assertEqual(resume_token, b'')
        self.assertEqual(
            metadata, [('google-cloud-resource-prefix', database.name)])
    def _make_value(value):
        from google.cloud.spanner_v1._helpers import _make_value_pb

        return _make_value_pb(value)
Example 26
    def partition_query(self,
                        sql,
                        params=None,
                        param_types=None,
                        partition_size_bytes=None,
                        max_partitions=None):
        """Perform a ``ParitionQuery`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type partition_size_bytes: int
        :param partition_size_bytes:
            (Optional) desired size for each partition generated.  The service
            uses this as a hint; the actual partition size may differ.

        :type max_partitions: int
        :param max_partitions:
            (Optional) desired maximum number of partitions generated. The
            service uses this as a hint; the actual number of partitions may
            differ.

        :rtype: iterable of bytes
        :returns: a sequence of partition tokens

        :raises ValueError:
            for single-use snapshots, or if no transaction ID is yet
            associated with the snapshot.
        """
        if not self._multi_use:
            raise ValueError("Cannot use single-use snapshot.")

        if self._transaction_id is None:
            raise ValueError("Transaction not started.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            params_pb = None

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        partition_options = PartitionOptions(
            partition_size_bytes=partition_size_bytes,
            max_partitions=max_partitions,
        )

        response = api.partition_query(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            partition_options=partition_options,
            metadata=metadata,
        )

        return [partition.partition_token for partition in response.partitions]
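
A hedged usage sketch: in practice the partition tokens are usually consumed through the higher-level ``BatchSnapshot`` API, which drives ``partition_query`` internally. ``database`` and the query are made up:

# Hypothetical usage sketch; 'database' and the query are made up.
snapshot = database.batch_snapshot()
try:
    for batch in snapshot.generate_query_batches("SELECT id, name FROM contacts"):
        for row in snapshot.process_query_batch(batch):
            print(row)
finally:
    snapshot.close()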
Example 27
    def _partition_query_helper(self,
                                multi_use,
                                w_txn,
                                size=None,
                                max_partitions=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.types import Partition
        from google.cloud.spanner_v1.types import PartitionOptions
        from google.cloud.spanner_v1.types import PartitionResponse
        from google.cloud.spanner_v1.types import Transaction
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1._helpers import _make_value_pb

        new_txn_id = b'ABECAB91'
        token_1 = b'FACE0FFF'
        token_2 = b'BADE8CAF'
        response = PartitionResponse(
            partitions=[
                Partition(partition_token=token_1),
                Partition(partition_token=token_2),
            ],
            transaction=Transaction(id=new_txn_id),
        )
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.partition_query.return_value = response
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        if w_txn:
            derived._transaction_id = TXN_ID

        tokens = list(
            derived.partition_query(
                SQL_QUERY_WITH_PARAM,
                PARAMS,
                PARAM_TYPES,
                partition_size_bytes=size,
                max_partitions=max_partitions,
            ))

        self.assertEqual(tokens, [token_1, token_2])

        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })

        expected_txn_selector = TransactionSelector(id=TXN_ID)

        expected_partition_options = PartitionOptions(
            partition_size_bytes=size, max_partitions=max_partitions)

        api.partition_query.assert_called_once_with(
            session=self.SESSION_NAME,
            sql=SQL_QUERY_WITH_PARAM,
            transaction=expected_txn_selector,
            params=expected_params,
            param_types=PARAM_TYPES,
            partition_options=expected_partition_options,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )
Example n. 28
    def _execute_sql_helper(self,
                            multi_use,
                            first=True,
                            count=0,
                            partition=None,
                            sql_count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector, TransactionOptions)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [
            [u'bharney', u'rhubbyl', 31],
            [u'phred', u'phlyntstone', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(fields=[
            StructType.Field(name='first_name', type=Type(code=STRING)),
            StructType.Field(name='last_name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_streaming_sql.return_value = iterator
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        derived._execute_sql_count = sql_count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(SQL_QUERY_WITH_PARAM,
                                         PARAMS,
                                         PARAM_TYPES,
                                         query_mode=MODE,
                                         partition=partition)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(read_only=TransactionOptions.ReadOnly(
            strong=True))

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })

        api.execute_streaming_sql.assert_called_once_with(
            self.SESSION_NAME,
            SQL_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            partition_token=partition,
            seqno=sql_count,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )

        self.assertEqual(derived._execute_sql_count, sql_count + 1)
Example n. 29
    def _batch_update_helper(self, error_after=None, count=0):
        from google.rpc.status_pb2 import Status
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import ResultSet
        from google.cloud.spanner_v1.proto.result_set_pb2 import ResultSetStats
        from google.cloud.spanner_v1.proto.spanner_pb2 import ExecuteBatchDmlResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import TransactionSelector
        from google.cloud.spanner_v1._helpers import _make_value_pb

        insert_dml = "INSERT INTO table(pkey, desc) VALUES (%pkey, %desc)"
        insert_params = {"pkey": 12345, "desc": "DESCRIPTION"}
        insert_param_types = {"pkey": "INT64", "desc": "STRING"}
        update_dml = 'UPDATE table SET desc = desc + "-amended"'
        delete_dml = "DELETE FROM table WHERE desc IS NULL"

        dml_statements = [
            (insert_dml, insert_params, insert_param_types),
            update_dml,
            delete_dml,
        ]

        stats_pbs = [
            ResultSetStats(row_count_exact=1),
            ResultSetStats(row_count_exact=2),
            ResultSetStats(row_count_exact=3),
        ]
        if error_after is not None:
            stats_pbs = stats_pbs[:error_after]
            expected_status = Status(code=400)
        else:
            expected_status = Status(code=200)
        expected_row_counts = [stats.row_count_exact for stats in stats_pbs]

        response = ExecuteBatchDmlResponse(
            status=expected_status,
            result_sets=[ResultSet(stats=stats_pb) for stats_pb in stats_pbs],
        )
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_batch_dml.return_value = response
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction._execute_sql_count = count

        status, row_counts = transaction.batch_update(dml_statements)

        self.assertEqual(status, expected_status)
        self.assertEqual(row_counts, expected_row_counts)

        expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
        expected_insert_params = Struct(
            fields={
                key: _make_value_pb(value) for (key, value) in insert_params.items()
            }
        )
        expected_statements = [
            {
                "sql": insert_dml,
                "params": expected_insert_params,
                "param_types": insert_param_types,
            },
            {"sql": update_dml},
            {"sql": delete_dml},
        ]

        api.execute_batch_dml.assert_called_once_with(
            session=self.SESSION_NAME,
            transaction=expected_transaction,
            statements=expected_statements,
            seqno=count,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertEqual(transaction._execute_sql_count, count + 1)
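
The helper above pins down the calling convention: each entry passed to ``batch_update`` is either a bare DML string or a ``(sql, params, param_types)`` tuple, and the method returns a ``(status, row_counts)`` pair with one exact count per statement that actually ran.  A hedged caller-side sketch follows; ``transaction`` stands in for an active read-write transaction, the table and column names are placeholders, and real calls use Spanner's ``@name`` parameter syntax and ``param_types`` objects rather than the string stand-ins used by the test fixtures.

# Sketch only: 'transaction' is a hypothetical active read-write transaction.
from google.cloud.spanner_v1 import param_types

statements = [
    (
        "INSERT INTO contacts(pkey, descr) VALUES (@pkey, @descr)",
        {"pkey": 12345, "descr": "DESCRIPTION"},
        {"pkey": param_types.INT64, "descr": param_types.STRING},
    ),
    "UPDATE contacts SET descr = CONCAT(descr, '-amended')",
    "DELETE FROM contacts WHERE descr IS NULL",
]

status, row_counts = transaction.batch_update(statements)
# row_counts holds one count per executed statement; a non-OK status means
# the statements after the failing one were not run.
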
    def partition_query(
        self,
        sql,
        params=None,
        param_types=None,
        partition_size_bytes=None,
        max_partitions=None,
    ):
        """Perform a ``ParitionQuery`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type partition_size_bytes: int
        :param partition_size_bytes:
            (Optional) desired size for each partition generated.  The service
            uses this as a hint; the actual partition size may differ.

        :type max_partitions: int
        :param max_partitions:
            (Optional) desired maximum number of partitions generated. The
            service uses this as a hint; the actual number of partitions may
            differ.

        :rtype: iterable of bytes
        :returns: a sequence of partition tokens

        :raises ValueError:
            for single-use snapshots, or if no transaction ID is
            yet associated with the snapshot.
        """
        if not self._multi_use:
            raise ValueError("Cannot use single-use snapshot.")

        if self._transaction_id is None:
            raise ValueError("Transaction not started.")

        if params is not None:
            if param_types is None:
                raise ValueError("Specify 'param_types' when passing 'params'.")
            params_pb = Struct(
                fields={key: _make_value_pb(value) for key, value in params.items()}
            )
        else:
            params_pb = None

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        partition_options = PartitionOptions(
            partition_size_bytes=partition_size_bytes, max_partitions=max_partitions
        )

        response = api.partition_query(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            partition_options=partition_options,
            metadata=metadata,
        )

        return [partition.partition_token for partition in response.partitions]
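
A hedged sketch of the preconditions this method enforces (the ``database`` / snapshot plumbing is assumed rather than shown, and ``sql``/``params``/``types`` are placeholders): the snapshot must be multi-use and its transaction must already be started, and both sizing options are only hints to the backend.

# Sketch only: 'database' is a hypothetical Database instance.
with database.snapshot(multi_use=True) as snapshot:
    snapshot.begin()  # assigns the transaction ID the guard above checks for
    tokens = snapshot.partition_query(
        sql,
        params=params,
        param_types=types,
        partition_size_bytes=1 << 20,  # ~1 MiB per partition, a hint only
        max_partitions=8,              # upper bound, also a hint
    )
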
    def _execute_sql_helper(self,
                            multi_use,
                            first=True,
                            count=0,
                            partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_value_pb

        txn_id = b'DEADBEEF'
        VALUES = [
            [u'bharney', u'rhubbyl', 31],
            [u'phred', u'phlyntstone', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(fields=[
            StructType.Field(name='first_name', type=Type(code=STRING)),
            StructType.Field(name='last_name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _execute_streaming_sql_response=iterator)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = txn_id

        result_set = derived.execute_sql(SQL_QUERY_WITH_PARAM,
                                         PARAMS,
                                         PARAM_TYPES,
                                         query_mode=MODE,
                                         partition=partition)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, sql, transaction, params, param_types, resume_token,
         query_mode, partition_token,
         metadata) = api._executed_streaming_sql_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(sql, SQL_QUERY_WITH_PARAM)
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, txn_id)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })
        self.assertEqual(params, expected_params)
        self.assertEqual(param_types, PARAM_TYPES)
        self.assertEqual(query_mode, MODE)
        self.assertEqual(resume_token, b'')
        self.assertEqual(partition_token, partition)
        self.assertEqual(metadata,
                         [('google-cloud-resource-prefix', database.name)])
    def _execute_sql_helper(
            self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [
            [u'bharney', u'rhubbyl', 31],
            [u'phred', u'phlyntstone', 32],
        ]
        VALUE_PBS = [
            [_make_value_pb(item) for item in row]
            for row in VALUES
        ]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(fields=[
            StructType.Field(name='first_name', type=Type(code=STRING)),
            StructType.Field(name='last_name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _execute_streaming_sql_response=iterator)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(
            SQL_QUERY_WITH_PARAM, PARAMS, PARAM_TYPES,
            query_mode=MODE, partition=partition)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, sql, transaction, params, param_types,
         resume_token, query_mode, partition_token,
         metadata) = api._executed_streaming_sql_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(sql, SQL_QUERY_WITH_PARAM)
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, TXN_ID)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        expected_params = Struct(fields={
            key: _make_value_pb(value) for (key, value) in PARAMS.items()})
        self.assertEqual(params, expected_params)
        self.assertEqual(param_types, PARAM_TYPES)
        self.assertEqual(query_mode, MODE)
        self.assertEqual(resume_token, b'')
        self.assertEqual(partition_token, partition)
        self.assertEqual(
            metadata, [('google-cloud-resource-prefix', database.name)])
Example n. 34
    def _callFUT(self, *args, **kw):
        from google.cloud.spanner_v1._helpers import _make_value_pb

        return _make_value_pb(*args, **kw)
Example n. 35
    def _callFUT(self, *args, **kw):
        from google.cloud.spanner_v1._helpers import _make_value_pb

        return _make_value_pb(*args, **kw)
Example n. 36
    def execute_partitioned_dml(self,
                                dml,
                                params=None,
                                param_types=None,
                                query_options=None):
        """Execute a partitionable DML statement.

        :type dml: str
        :param dml: DML statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``dml``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_options:
            :class:`~google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryOptions`
            or :class:`dict`
        :param query_options:
                (Optional) Query optimizer configuration to use for the given query.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.QueryOptions`

        :rtype: int
        :returns: Count of rows affected by the DML statement.
        """
        query_options = _merge_query_options(
            self._instance._client._query_options, query_options)
        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            params_pb = None

        api = self.spanner_api

        txn_options = TransactionOptions(
            partitioned_dml=TransactionOptions.PartitionedDml())

        metadata = _metadata_with_prefix(self.name)

        def execute_pdml():
            with SessionCheckout(self._pool) as session:

                txn = api.begin_transaction(session.name,
                                            txn_options,
                                            metadata=metadata)

                txn_selector = TransactionSelector(id=txn.id)

                restart = functools.partial(
                    api.execute_streaming_sql,
                    session.name,
                    dml,
                    transaction=txn_selector,
                    params=params_pb,
                    param_types=param_types,
                    query_options=query_options,
                    metadata=metadata,
                )

                iterator = _restart_on_unavailable(restart)

                result_set = StreamedResultSet(iterator)
                list(result_set)  # consume all partials

                return result_set.stats.row_count_lower_bound

        retry_config = api._method_configs["ExecuteStreamingSql"].retry

        return _retry_on_aborted(execute_pdml, retry_config)()
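
A hedged caller-side sketch (``database`` is a hypothetical Database instance; the table, columns and values are placeholders): the return value is ``row_count_lower_bound``, i.e. a lower bound on the affected rows rather than an exact count.

# Sketch only: 'database' is a hypothetical Database instance.
from google.cloud.spanner_v1 import param_types

row_count = database.execute_partitioned_dml(
    "UPDATE contacts SET status = @status WHERE last_seen < @cutoff",
    params={"status": "archived", "cutoff": "2019-01-01"},
    param_types={
        "status": param_types.STRING,
        "cutoff": param_types.STRING,
    },
)
# 'row_count' is a lower bound on the number of rows affected.
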
Example n. 37
    def partition_query(
        self,
        sql,
        params=None,
        param_types=None,
        partition_size_bytes=None,
        max_partitions=None,
        *,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform a ``PartitionQuery`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type partition_size_bytes: int
        :param partition_size_bytes:
            (Optional) desired size for each partition generated.  The service
            uses this as a hint; the actual partition size may differ.

        :type max_partitions: int
        :param max_partitions:
            (Optional) desired maximum number of partitions generated. The
            service uses this as a hint; the actual number of partitions may
            differ.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :rtype: iterable of bytes
        :returns: a sequence of partition tokens

        :raises ValueError:
            for single-use snapshots, or if no transaction ID is
            yet associated with the snapshot.
        """
        if not self._multi_use:
            raise ValueError("Cannot use single-use snapshot.")

        if self._transaction_id is None:
            raise ValueError("Transaction not started.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for (key, value) in params.items()
            })
        else:
            params_pb = Struct()

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        partition_options = PartitionOptions(
            partition_size_bytes=partition_size_bytes,
            max_partitions=max_partitions)
        request = PartitionQueryRequest(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            partition_options=partition_options,
        )

        trace_attributes = {"db.statement": sql}
        with trace_call(
                "CloudSpanner.PartitionReadWriteTransaction",
                self._session,
                trace_attributes,
        ):
            response = api.partition_query(
                request=request,
                metadata=metadata,
                retry=retry,
                timeout=timeout,
            )

        return [partition.partition_token for partition in response.partitions]
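
A hedged sketch of overriding the per-call ``retry`` and ``timeout``, which are keyword-only in this signature; ``snapshot``, ``sql``, ``params`` and ``types`` are placeholders for an already-begun multi-use snapshot and its query.

# Sketch only: overriding the gapic_v1.method.DEFAULT retry/timeout.
from google.api_core.retry import Retry

tokens = snapshot.partition_query(
    sql,
    params=params,
    param_types=types,
    max_partitions=8,
    retry=Retry(deadline=60.0),  # custom retry policy for this call
    timeout=30.0,                # timeout in seconds for this call
)
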
Example n. 38
    def _execute_sql_helper(
        self,
        multi_use,
        first=True,
        count=0,
        partition=None,
        sql_count=0,
        query_options=None,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        retry=google.api_core.gapic_v1.method.DEFAULT,
    ):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet,
            ResultSetMetadata,
            ResultSetStats,
        )
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector,
            TransactionOptions,
        )
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import (
            _make_value_pb,
            _merge_query_options,
        )

        VALUES = [[u"bharney", u"rhubbyl", 31], [u"phred", u"phlyntstone", 32]]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(
            fields=[
                StructType.Field(name="first_name", type=Type(code=STRING)),
                StructType.Field(name="last_name", type=Type(code=STRING)),
                StructType.Field(name="age", type=Type(code=INT64)),
            ]
        )
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
        )
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_streaming_sql.return_value = iterator
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        derived._execute_sql_count = sql_count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(
            SQL_QUERY_WITH_PARAM,
            PARAMS,
            PARAM_TYPES,
            query_mode=MODE,
            query_options=query_options,
            partition=partition,
            retry=retry,
            timeout=timeout,
        )

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(strong=True)
        )

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        expected_query_options = database._instance._client._query_options
        if query_options:
            expected_query_options = _merge_query_options(
                expected_query_options, query_options
            )

        api.execute_streaming_sql.assert_called_once_with(
            self.SESSION_NAME,
            SQL_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            query_options=expected_query_options,
            partition_token=partition,
            seqno=sql_count,
            metadata=[("google-cloud-resource-prefix", database.name)],
            timeout=timeout,
            retry=retry,
        )

        self.assertEqual(derived._execute_sql_count, sql_count + 1)

        self.assertSpanAttributes(
            "CloudSpanner.ReadWriteTransaction",
            status=StatusCanonicalCode.OK,
            attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}),
        )