def _make_txn_selector(self): """Helper for :meth:`read`.""" if self._transaction_id is not None: return TransactionSelector(id=self._transaction_id) if self._read_timestamp: key = "read_timestamp" value = self._read_timestamp elif self._min_read_timestamp: key = "min_read_timestamp" value = self._min_read_timestamp elif self._max_staleness: key = "max_staleness" value = self._max_staleness elif self._exact_staleness: key = "exact_staleness" value = self._exact_staleness else: key = "strong" value = True options = TransactionOptions(read_only=TransactionOptions.ReadOnly( **{key: value})) if self._multi_use: return TransactionSelector(begin=options) else: return TransactionSelector(single_use=options)
def _make_txn_selector(self):
    from google.cloud.spanner_v1 import (
        TransactionOptions,
        TransactionSelector,
    )

    if self._transaction_id:
        return TransactionSelector(id=self._transaction_id)

    options = TransactionOptions(
        read_only=TransactionOptions.ReadOnly(strong=True)
    )
    if self._multi_use:
        return TransactionSelector(begin=options)
    return TransactionSelector(single_use=options)
def execute_pdml():
    with SessionCheckout(self._pool) as session:
        txn = api.begin_transaction(
            session=session.name, options=txn_options, metadata=metadata
        )

        txn_selector = TransactionSelector(id=txn.id)

        request = ExecuteSqlRequest(
            session=session.name,
            sql=dml,
            transaction=txn_selector,
            params=params_pb,
            param_types=param_types,
            query_options=query_options,
        )
        restart = functools.partial(
            api.execute_streaming_sql,
            request=request,
            metadata=metadata,
        )

        iterator = _restart_on_unavailable(restart)

        result_set = StreamedResultSet(iterator)
        list(result_set)  # consume all partials

        return result_set.stats.row_count_lower_bound
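# Standalone sketch (hypothetical names) of the restart pattern used in
# execute_pdml above: functools.partial pre-binds the request and metadata so
# the streaming call can be re-issued identically if the stream is interrupted.
import functools

def _open_stream(request, metadata):
    """Hypothetical stand-in for api.execute_streaming_sql."""
    return iter([request])

_restart = functools.partial(_open_stream, request="SELECT 1", metadata=[])
_first_attempt = _restart()   # opens the stream
_second_attempt = _restart()  # re-opens it with the identical bound arguments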
def _execute_update_helper(self, count=0, query_options=None):
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1 import (
        ResultSet,
        ResultSetStats,
    )
    from google.cloud.spanner_v1 import TransactionSelector
    from google.cloud.spanner_v1._helpers import (
        _make_value_pb,
        _merge_query_options,
    )
    from google.cloud.spanner_v1 import ExecuteSqlRequest

    MODE = 2  # PROFILE

    stats_pb = ResultSetStats(row_count_exact=1)

    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.execute_sql.return_value = ResultSet(stats=stats_pb)
    session = _Session(database)
    transaction = self._make_one(session)
    transaction._transaction_id = self.TRANSACTION_ID
    transaction._execute_sql_count = count

    row_count = transaction.execute_update(
        DML_QUERY_WITH_PARAM,
        PARAMS,
        PARAM_TYPES,
        query_mode=MODE,
        query_options=query_options,
    )

    self.assertEqual(row_count, 1)

    expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
    expected_params = Struct(
        fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
    )

    expected_query_options = database._instance._client._query_options
    if query_options:
        expected_query_options = _merge_query_options(
            expected_query_options, query_options
        )

    expected_request = ExecuteSqlRequest(
        session=self.SESSION_NAME,
        sql=DML_QUERY_WITH_PARAM,
        transaction=expected_transaction,
        params=expected_params,
        param_types=PARAM_TYPES,
        query_mode=MODE,
        query_options=expected_query_options,
        seqno=count,
    )
    api.execute_sql.assert_called_once_with(
        request=expected_request,
        metadata=[("google-cloud-resource-prefix", database.name)],
    )

    self.assertEqual(transaction._execute_sql_count, count + 1)
def _make_txn_selector(self):
    """Helper for :meth:`read`.

    :rtype: :class:`~.transaction_pb2.TransactionSelector`
    :returns: a selector configured for read-write transaction semantics.
    """
    self._check_state()
    return TransactionSelector(id=self._transaction_id)
def _batch_update_helper(self, error_after=None, count=0):
    from google.rpc.status_pb2 import Status
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1 import param_types
    from google.cloud.spanner_v1 import ResultSet
    from google.cloud.spanner_v1 import ResultSetStats
    from google.cloud.spanner_v1 import ExecuteBatchDmlRequest
    from google.cloud.spanner_v1 import ExecuteBatchDmlResponse
    from google.cloud.spanner_v1 import TransactionSelector
    from google.cloud.spanner_v1._helpers import _make_value_pb

    insert_dml = "INSERT INTO table(pkey, desc) VALUES (%pkey, %desc)"
    insert_params = {"pkey": 12345, "desc": "DESCRIPTION"}
    insert_param_types = {"pkey": param_types.INT64, "desc": param_types.STRING}
    update_dml = 'UPDATE table SET desc = desc + "-amended"'
    delete_dml = "DELETE FROM table WHERE desc IS NULL"

    dml_statements = [
        (insert_dml, insert_params, insert_param_types),
        update_dml,
        delete_dml,
    ]

    stats_pbs = [
        ResultSetStats(row_count_exact=1),
        ResultSetStats(row_count_exact=2),
        ResultSetStats(row_count_exact=3),
    ]
    if error_after is not None:
        stats_pbs = stats_pbs[:error_after]
        expected_status = Status(code=400)
    else:
        expected_status = Status(code=200)
    expected_row_counts = [stats.row_count_exact for stats in stats_pbs]

    response = ExecuteBatchDmlResponse(
        status=expected_status,
        result_sets=[ResultSet(stats=stats_pb) for stats_pb in stats_pbs],
    )
    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.execute_batch_dml.return_value = response
    session = _Session(database)
    transaction = self._make_one(session)
    transaction._transaction_id = self.TRANSACTION_ID
    transaction._execute_sql_count = count

    status, row_counts = transaction.batch_update(dml_statements)

    self.assertEqual(status, expected_status)
    self.assertEqual(row_counts, expected_row_counts)

    expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
    expected_insert_params = Struct(
        fields={
            key: _make_value_pb(value) for (key, value) in insert_params.items()
        }
    )
    expected_statements = [
        ExecuteBatchDmlRequest.Statement(
            sql=insert_dml,
            params=expected_insert_params,
            param_types=insert_param_types,
        ),
        ExecuteBatchDmlRequest.Statement(sql=update_dml),
        ExecuteBatchDmlRequest.Statement(sql=delete_dml),
    ]

    expected_request = ExecuteBatchDmlRequest(
        session=self.SESSION_NAME,
        transaction=expected_transaction,
        statements=expected_statements,
        seqno=count,
    )
    api.execute_batch_dml.assert_called_once_with(
        request=expected_request,
        metadata=[("google-cloud-resource-prefix", database.name)],
    )

    self.assertEqual(transaction._execute_sql_count, count + 1)
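# Hedged usage sketch mirroring the fixtures above: batch_update accepts either
# bare SQL strings or (sql, params, param_types) tuples.  Nothing is assumed
# beyond the param_types import already used in the helper; the trailing call is
# left commented out because it needs a live transaction object.
from google.cloud.spanner_v1 import param_types

example_statements = [
    "DELETE FROM table WHERE desc IS NULL",  # plain string, no parameters
    (
        "INSERT INTO table(pkey, desc) VALUES (%pkey, %desc)",  # parameterized
        {"pkey": 12345, "desc": "DESCRIPTION"},
        {"pkey": param_types.INT64, "desc": param_types.STRING},
    ),
]
# status, row_counts = transaction.batch_update(example_statements)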
def _partition_query_helper(self, multi_use, w_txn, size=None, max_partitions=None):
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1 import Partition
    from google.cloud.spanner_v1 import PartitionOptions
    from google.cloud.spanner_v1 import PartitionQueryRequest
    from google.cloud.spanner_v1 import PartitionResponse
    from google.cloud.spanner_v1 import Transaction
    from google.cloud.spanner_v1 import TransactionSelector
    from google.cloud.spanner_v1._helpers import _make_value_pb

    new_txn_id = b"ABECAB91"
    token_1 = b"FACE0FFF"
    token_2 = b"BADE8CAF"
    response = PartitionResponse(
        partitions=[
            Partition(partition_token=token_1),
            Partition(partition_token=token_2),
        ],
        transaction=Transaction(id=new_txn_id),
    )
    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.partition_query.return_value = response
    session = _Session(database)
    derived = self._makeDerived(session)
    derived._multi_use = multi_use
    if w_txn:
        derived._transaction_id = TXN_ID

    tokens = list(
        derived.partition_query(
            SQL_QUERY_WITH_PARAM,
            PARAMS,
            PARAM_TYPES,
            partition_size_bytes=size,
            max_partitions=max_partitions,
        )
    )

    self.assertEqual(tokens, [token_1, token_2])

    expected_params = Struct(
        fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
    )

    expected_txn_selector = TransactionSelector(id=TXN_ID)

    expected_partition_options = PartitionOptions(
        partition_size_bytes=size, max_partitions=max_partitions
    )

    expected_request = PartitionQueryRequest(
        session=self.SESSION_NAME,
        sql=SQL_QUERY_WITH_PARAM,
        transaction=expected_txn_selector,
        params=expected_params,
        param_types=PARAM_TYPES,
        partition_options=expected_partition_options,
    )
    api.partition_query.assert_called_once_with(
        request=expected_request,
        metadata=[("google-cloud-resource-prefix", database.name)],
    )

    self.assertSpanAttributes(
        "CloudSpanner.PartitionReadWriteTransaction",
        status=StatusCanonicalCode.OK,
        attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}),
    )
def _partition_read_helper(
    self, multi_use, w_txn, size=None, max_partitions=None, index=None
):
    from google.cloud.spanner_v1.keyset import KeySet
    from google.cloud.spanner_v1 import Partition
    from google.cloud.spanner_v1 import PartitionOptions
    from google.cloud.spanner_v1 import PartitionReadRequest
    from google.cloud.spanner_v1 import PartitionResponse
    from google.cloud.spanner_v1 import Transaction
    from google.cloud.spanner_v1 import TransactionSelector

    keyset = KeySet(all_=True)
    new_txn_id = b"ABECAB91"
    token_1 = b"FACE0FFF"
    token_2 = b"BADE8CAF"
    response = PartitionResponse(
        partitions=[
            Partition(partition_token=token_1),
            Partition(partition_token=token_2),
        ],
        transaction=Transaction(id=new_txn_id),
    )
    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.partition_read.return_value = response
    session = _Session(database)
    derived = self._makeDerived(session)
    derived._multi_use = multi_use
    if w_txn:
        derived._transaction_id = TXN_ID

    tokens = list(
        derived.partition_read(
            TABLE_NAME,
            COLUMNS,
            keyset,
            index=index,
            partition_size_bytes=size,
            max_partitions=max_partitions,
        )
    )

    self.assertEqual(tokens, [token_1, token_2])

    expected_txn_selector = TransactionSelector(id=TXN_ID)

    expected_partition_options = PartitionOptions(
        partition_size_bytes=size, max_partitions=max_partitions
    )

    expected_request = PartitionReadRequest(
        session=self.SESSION_NAME,
        table=TABLE_NAME,
        columns=COLUMNS,
        key_set=keyset._to_pb(),
        transaction=expected_txn_selector,
        index=index,
        partition_options=expected_partition_options,
    )
    api.partition_read.assert_called_once_with(
        request=expected_request,
        metadata=[("google-cloud-resource-prefix", database.name)],
    )

    self.assertSpanAttributes(
        "CloudSpanner.PartitionReadOnlyTransaction",
        status=StatusCanonicalCode.OK,
        attributes=dict(
            BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS)
        ),
    )
def _execute_sql_helper(
    self,
    multi_use,
    first=True,
    count=0,
    partition=None,
    sql_count=0,
    query_options=None,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    retry=google.api_core.gapic_v1.method.DEFAULT,
):
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1 import (
        PartialResultSet,
        ResultSetMetadata,
        ResultSetStats,
    )
    from google.cloud.spanner_v1 import (
        TransactionSelector,
        TransactionOptions,
    )
    from google.cloud.spanner_v1 import ExecuteSqlRequest
    from google.cloud.spanner_v1 import Type, StructType
    from google.cloud.spanner_v1 import TypeCode
    from google.cloud.spanner_v1._helpers import (
        _make_value_pb,
        _merge_query_options,
    )

    VALUES = [[u"bharney", u"rhubbyl", 31], [u"phred", u"phlyntstone", 32]]
    MODE = 2  # PROFILE
    struct_type_pb = StructType(
        fields=[
            StructType.Field(name="first_name", type_=Type(code=TypeCode.STRING)),
            StructType.Field(name="last_name", type_=Type(code=TypeCode.STRING)),
            StructType.Field(name="age", type_=Type(code=TypeCode.INT64)),
        ]
    )
    metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
    stats_pb = ResultSetStats(
        query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
    )
    result_sets = [
        PartialResultSet(metadata=metadata_pb),
        PartialResultSet(stats=stats_pb),
    ]
    for i in range(len(result_sets)):
        result_sets[i].values.extend(VALUES[i])
    iterator = _MockIterator(*result_sets)
    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.execute_streaming_sql.return_value = iterator
    session = _Session(database)
    derived = self._makeDerived(session)
    derived._multi_use = multi_use
    derived._read_request_count = count
    derived._execute_sql_count = sql_count
    if not first:
        derived._transaction_id = TXN_ID

    result_set = derived.execute_sql(
        SQL_QUERY_WITH_PARAM,
        PARAMS,
        PARAM_TYPES,
        query_mode=MODE,
        query_options=query_options,
        partition=partition,
        retry=retry,
        timeout=timeout,
    )

    self.assertEqual(derived._read_request_count, count + 1)

    if multi_use:
        self.assertIs(result_set._source, derived)
    else:
        self.assertIsNone(result_set._source)

    self.assertEqual(list(result_set), VALUES)
    self.assertEqual(result_set.metadata, metadata_pb)
    self.assertEqual(result_set.stats, stats_pb)

    txn_options = TransactionOptions(
        read_only=TransactionOptions.ReadOnly(strong=True)
    )

    if multi_use:
        if first:
            expected_transaction = TransactionSelector(begin=txn_options)
        else:
            expected_transaction = TransactionSelector(id=TXN_ID)
    else:
        expected_transaction = TransactionSelector(single_use=txn_options)

    expected_params = Struct(
        fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
    )

    expected_query_options = database._instance._client._query_options
    if query_options:
        expected_query_options = _merge_query_options(
            expected_query_options, query_options
        )

    expected_request = ExecuteSqlRequest(
        session=self.SESSION_NAME,
        sql=SQL_QUERY_WITH_PARAM,
        transaction=expected_transaction,
        params=expected_params,
        param_types=PARAM_TYPES,
        query_mode=MODE,
        query_options=expected_query_options,
        partition_token=partition,
        seqno=sql_count,
    )
    api.execute_streaming_sql.assert_called_once_with(
        request=expected_request,
        metadata=[("google-cloud-resource-prefix", database.name)],
        timeout=timeout,
        retry=retry,
    )

    self.assertEqual(derived._execute_sql_count, sql_count + 1)

    self.assertSpanAttributes(
        "CloudSpanner.ReadWriteTransaction",
        status=StatusCanonicalCode.OK,
        attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}),
    )
def _read_helper(self, multi_use, first=True, count=0, partition=None):
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1 import (
        PartialResultSet,
        ResultSetMetadata,
        ResultSetStats,
    )
    from google.cloud.spanner_v1 import (
        TransactionSelector,
        TransactionOptions,
    )
    from google.cloud.spanner_v1 import ReadRequest
    from google.cloud.spanner_v1 import Type, StructType
    from google.cloud.spanner_v1 import TypeCode
    from google.cloud.spanner_v1.keyset import KeySet
    from google.cloud.spanner_v1._helpers import _make_value_pb

    VALUES = [[u"bharney", 31], [u"phred", 32]]
    struct_type_pb = StructType(
        fields=[
            StructType.Field(name="name", type_=Type(code=TypeCode.STRING)),
            StructType.Field(name="age", type_=Type(code=TypeCode.INT64)),
        ]
    )
    metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
    stats_pb = ResultSetStats(
        query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
    )
    result_sets = [
        PartialResultSet(metadata=metadata_pb),
        PartialResultSet(stats=stats_pb),
    ]
    for i in range(len(result_sets)):
        result_sets[i].values.extend(VALUES[i])
    KEYS = [["*****@*****.**"], ["*****@*****.**"]]
    keyset = KeySet(keys=KEYS)
    INDEX = "email-address-index"
    LIMIT = 20
    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.streaming_read.return_value = _MockIterator(*result_sets)
    session = _Session(database)
    derived = self._makeDerived(session)
    derived._multi_use = multi_use
    derived._read_request_count = count
    if not first:
        derived._transaction_id = TXN_ID

    if partition is not None:  # 'limit' and 'partition' incompatible
        result_set = derived.read(
            TABLE_NAME, COLUMNS, keyset, index=INDEX, partition=partition
        )
    else:
        result_set = derived.read(
            TABLE_NAME, COLUMNS, keyset, index=INDEX, limit=LIMIT
        )

    self.assertEqual(derived._read_request_count, count + 1)

    if multi_use:
        self.assertIs(result_set._source, derived)
    else:
        self.assertIsNone(result_set._source)

    self.assertEqual(list(result_set), VALUES)
    self.assertEqual(result_set.metadata, metadata_pb)
    self.assertEqual(result_set.stats, stats_pb)

    txn_options = TransactionOptions(
        read_only=TransactionOptions.ReadOnly(strong=True)
    )

    if multi_use:
        if first:
            expected_transaction = TransactionSelector(begin=txn_options)
        else:
            expected_transaction = TransactionSelector(id=TXN_ID)
    else:
        expected_transaction = TransactionSelector(single_use=txn_options)

    if partition is not None:
        expected_limit = 0
    else:
        expected_limit = LIMIT

    expected_request = ReadRequest(
        session=self.SESSION_NAME,
        table=TABLE_NAME,
        columns=COLUMNS,
        key_set=keyset._to_pb(),
        transaction=expected_transaction,
        index=INDEX,
        limit=expected_limit,
        partition_token=partition,
    )
    api.streaming_read.assert_called_once_with(
        request=expected_request,
        metadata=[("google-cloud-resource-prefix", database.name)],
    )

    self.assertSpanAttributes(
        "CloudSpanner.ReadOnlyTransaction",
        attributes=dict(
            BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS)
        ),
    )