Example #1
    def test_empty(self):
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64

        struct_type_pb = StructType(fields=[
            StructType.Field(name="name", type=Type(code=STRING)),
            StructType.Field(name="age", type=Type(code=INT64)),
        ])

        self.assertEqual(self._callFUT(rows=[], row_type=struct_type_pb), [])
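
These snippets exercise the function under test through a _callFUT indirection that the page does not show. A minimal sketch of that convention, assuming the target is the row-parsing helper in google.cloud.spanner_v1._helpers (an assumption, since the real test case defines its own forwarding method):

    def _callFUT(self, *args, **kw):
        # Hypothetical forwarding helper; the target function below is an
        # assumption, not confirmed by this page.
        from google.cloud.spanner_v1._helpers import _parse_list_value_pbs

        return _parse_list_value_pbs(*args, **kw)
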
    def _make_struct_type(struct_type_fields):
        from google.cloud.spanner_v1.proto.type_pb2 import StructType
        from google.cloud.spanner_v1.proto.type_pb2 import Type

        fields = [
            StructType.Field(name=key, type=Type(code=value))
            for key, value in struct_type_fields
        ]
        struct_type = StructType(fields=fields)
        return Type(code="STRUCT", struct_type=struct_type)
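
An illustrative (assumed) call to the helper above, turning (field name, type code) pairs into a STRUCT-typed Type proto:

    person_type = _make_struct_type([
        ("name", "STRING"),
        ("age", "INT64"),
    ])
    # person_type.code is the STRUCT enum value; person_type.struct_type.fields
    # holds the two StructType.Field entries in order.
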
Example #3
    def test_non_empty(self):
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_list_value_pbs

        VALUES = [[u"phred", 32], [u"bharney", 31]]
        struct_type_pb = StructType(fields=[
            StructType.Field(name="name", type=Type(code=STRING)),
            StructType.Field(name="age", type=Type(code=INT64)),
        ])
        values_pbs = _make_list_value_pbs(VALUES)

        self.assertEqual(
            self._callFUT(rows=values_pbs, row_type=struct_type_pb), VALUES)
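
For context, a hedged look at what _make_list_value_pbs produces for VALUES above: each row becomes a ListValue proto, and INT64 values travel as decimal strings:

    from google.cloud.spanner_v1._helpers import _make_list_value_pbs

    values_pbs = _make_list_value_pbs([[u"phred", 32], [u"bharney", 31]])
    assert values_pbs[0].values[0].string_value == u"phred"
    assert values_pbs[0].values[1].string_value == u"32"  # INT64 rides as a string
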
Example #4
    def test_w_struct(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRUCT, STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_list_value_pb

        VALUES = [u"phred", 32]
        struct_type_pb = StructType(fields=[
            StructType.Field(name="name", type=Type(code=STRING)),
            StructType.Field(name="age", type=Type(code=INT64)),
        ])
        field_type = Type(code=STRUCT, struct_type=struct_type_pb)
        value_pb = Value(list_value=_make_list_value_pb(VALUES))

        self.assertEqual(self._callFUT(value_pb, field_type), VALUES)
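
The Value constructed above via _make_list_value_pb is equivalent to the following hand-written proto (a hedged illustration of how the list-valued message is laid out):

    from google.protobuf.struct_pb2 import ListValue, Value

    equivalent_pb = Value(list_value=ListValue(values=[
        Value(string_value=u"phred"),
        Value(string_value=u"32"),  # the INT64 32, string-encoded on the wire
    ]))
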
    def test__merge_chunk_array_of_array_of_string(self):
        from google.cloud.spanner_v1.proto.type_pb2 import StructType
        from google.cloud.spanner_v1.proto.type_pb2 import Type

        subarray_type = Type(code="ARRAY",
                             array_element_type=Type(code="STRING"))
        array_type = Type(code="ARRAY", array_element_type=subarray_type)
        iterator = _MockCancellableIterator()
        streamed = self._make_one(iterator)
        FIELDS = [StructType.Field(name="lolos", type=array_type)]
        streamed._metadata = self._make_result_set_metadata(FIELDS)
        streamed._pending_chunk = self._make_list_value(value_pbs=[
            self._make_list_value([u"A", u"B"]),
            self._make_list_value([u"C"]),
        ])
        chunk = self._make_list_value(value_pbs=[
            self._make_list_value([u"D"]),
            self._make_list_value([u"E", u"F"]),
        ])

        merged = streamed._merge_chunk(chunk)

        expected = self._make_list_value(value_pbs=[
            self._make_list_value([u"A", u"B"]),
            self._make_list_value([u"CD"]),
            self._make_list_value([u"E", u"F"]),
        ])
        self.assertEqual(merged, expected)
        self.assertIsNone(streamed._pending_chunk)
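
The merge tests above and below rely on helper methods of the test class that this page omits. A minimal sketch consistent with how they are called here; the signatures are inferred from usage, not copied from the source:

    @staticmethod
    def _make_list_value(values=(), value_pbs=None):
        # Wraps either ready-made Value protos or plain Python values in a
        # single list-valued Value proto.
        from google.protobuf.struct_pb2 import ListValue, Value
        from google.cloud.spanner_v1._helpers import _make_list_value_pb

        if value_pbs is not None:
            return Value(list_value=ListValue(values=value_pbs))
        return Value(list_value=_make_list_value_pb(values))

    @staticmethod
    def _make_result_set_metadata(fields=()):
        # Signature inferred from the call above; the real helper may accept
        # more arguments (e.g. a transaction id).
        from google.cloud.spanner_v1.proto.result_set_pb2 import ResultSetMetadata
        from google.cloud.spanner_v1.proto.type_pb2 import StructType

        return ResultSetMetadata(row_type=StructType(fields=list(fields)))
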
    def test__merge_chunk_array_of_array_of_int(self):
        from google.cloud.spanner_v1.proto.type_pb2 import StructType
        from google.cloud.spanner_v1.proto.type_pb2 import Type

        subarray_type = Type(code='ARRAY',
                             array_element_type=Type(code='INT64'))
        array_type = Type(code='ARRAY', array_element_type=subarray_type)
        iterator = _MockCancellableIterator()
        streamed = self._make_one(iterator)
        FIELDS = [StructType.Field(name='loloi', type=array_type)]
        streamed._metadata = self._make_result_set_metadata(FIELDS)
        streamed._pending_chunk = self._make_list_value(value_pbs=[
            self._make_list_value([0, 1]),
            self._make_list_value([2]),
        ])
        chunk = self._make_list_value(value_pbs=[
            self._make_list_value([3]),
            self._make_list_value([4, 5]),
        ])

        merged = streamed._merge_chunk(chunk)

        expected = self._make_list_value(value_pbs=[
            self._make_list_value([0, 1]),
            self._make_list_value([23]),
            self._make_list_value([4, 5]),
        ])
        self.assertEqual(merged, expected)
        self.assertIsNone(streamed._pending_chunk)
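
Why [u"C"] + [u"D"] merges to [u"CD"] and [2] + [3] to [23]: both STRING and INT64 values cross the wire as string_value entries, so _merge_chunk concatenates the trailing element of the pending chunk with the leading element of the new chunk before rows are decoded. A quick hedged check of that encoding:

    from google.cloud.spanner_v1._helpers import _make_value_pb

    assert _make_value_pb(2).string_value == "2"      # INT64 encoded as a string
    assert _make_value_pb(u"C").string_value == u"C"  # STRING is a string_value too
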
    def _make_array_field(name, element_type_code=None, element_type=None):
        from google.cloud.spanner_v1.proto.type_pb2 import StructType
        from google.cloud.spanner_v1.proto.type_pb2 import Type

        if element_type is None:
            element_type = Type(code=element_type_code)
        array_type = Type(code="ARRAY", array_element_type=element_type)
        return StructType.Field(name=name, type=array_type)
    def _make_scalar_field(name, type_):
        from google.cloud.spanner_v1.proto.type_pb2 import StructType
        from google.cloud.spanner_v1.proto.type_pb2 import Type

        return StructType.Field(name=name, type=Type(code=type_))
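
An illustrative (assumed) way the two field helpers above get used when assembling result-set metadata for tests:

    from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64

    name_field = _make_scalar_field("name", STRING)
    age_field = _make_scalar_field("age", INT64)
    tags_field = _make_array_field("tags", element_type_code=STRING)
    # Each call returns a StructType.Field proto, ready to be wrapped in a
    # StructType for ResultSetMetadata(row_type=...).
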
    def _execute_sql_helper(self,
                            multi_use,
                            first=True,
                            count=0,
                            partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_value_pb

        txn_id = b'DEADBEEF'
        VALUES = [
            [u'bharney', u'rhubbyl', 31],
            [u'phred', u'phlyntstone', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(fields=[
            StructType.Field(name='first_name', type=Type(code=STRING)),
            StructType.Field(name='last_name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _execute_streaming_sql_response=iterator)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = txn_id

        result_set = derived.execute_sql(SQL_QUERY_WITH_PARAM,
                                         PARAMS,
                                         PARAM_TYPES,
                                         query_mode=MODE,
                                         partition=partition)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, sql, transaction, params, param_types, resume_token,
         query_mode, partition_token,
         metadata) = api._executed_streaming_sql_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(sql, SQL_QUERY_WITH_PARAM)
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, txn_id)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })
        self.assertEqual(params, expected_params)
        self.assertEqual(param_types, PARAM_TYPES)
        self.assertEqual(query_mode, MODE)
        self.assertEqual(resume_token, b'')
        self.assertEqual(partition_token, partition)
        self.assertEqual(metadata,
                         [('google-cloud-resource-prefix', database.name)])
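
Hedged driver methods for the helper above; the test-method names and the partition token are illustrative assumptions that mirror the flag combinations the helper handles:

    def test_execute_sql_wo_multi_use(self):
        self._execute_sql_helper(multi_use=False)

    def test_execute_sql_w_multi_use_w_first(self):
        self._execute_sql_helper(multi_use=True, first=True)

    def test_execute_sql_w_multi_use_wo_first(self):
        self._execute_sql_helper(multi_use=True, first=False)

    def test_execute_sql_w_partition(self):
        self._execute_sql_helper(multi_use=False, partition=b"FADEABED")
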
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        txn_id = b'DEADBEEF'
        VALUES = [
            [u'bharney', 31],
            [u'phred', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        struct_type_pb = StructType(fields=[
            StructType.Field(name='name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        KEYS = ['*****@*****.**', '*****@*****.**']
        keyset = KeySet(keys=KEYS)
        INDEX = 'email-address-index'
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _streaming_read_response=_MockIterator(*result_sets))
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = txn_id

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      partition=partition)
        else:
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      limit=LIMIT)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, table, columns, key_set, transaction, index, limit,
         resume_token, r_partition, metadata) = api._streaming_read_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(table, TABLE_NAME)
        self.assertEqual(columns, COLUMNS)
        self.assertEqual(key_set, keyset._to_pb())
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, txn_id)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        self.assertEqual(index, INDEX)
        if partition is not None:
            self.assertEqual(limit, 0)
            self.assertEqual(r_partition, partition)
        else:
            self.assertEqual(limit, LIMIT)
            self.assertIsNone(r_partition)
        self.assertEqual(resume_token, b'')
        self.assertEqual(metadata,
                         [('google-cloud-resource-prefix', database.name)])
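
The read helper follows the same pattern; because partition and limit are mutually exclusive, typical drivers (names assumed) exercise both branches:

    def test_read_wo_multi_use(self):
        self._read_helper(multi_use=False)

    def test_read_w_partition(self):
        # Takes the partition branch, where the helper expects limit == 0.
        self._read_helper(multi_use=True, first=True, partition=b"FADEABED")
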
Example #11
    def _execute_sql_helper(self,
                            multi_use,
                            first=True,
                            count=0,
                            partition=None,
                            sql_count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector, TransactionOptions)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [
            [u'bharney', u'rhubbyl', 31],
            [u'phred', u'phlyntstone', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(fields=[
            StructType.Field(name='first_name', type=Type(code=STRING)),
            StructType.Field(name='last_name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_streaming_sql.return_value = iterator
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        derived._execute_sql_count = sql_count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(SQL_QUERY_WITH_PARAM,
                                         PARAMS,
                                         PARAM_TYPES,
                                         query_mode=MODE,
                                         partition=partition)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(read_only=TransactionOptions.ReadOnly(
            strong=True))

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })

        api.execute_streaming_sql.assert_called_once_with(
            self.SESSION_NAME,
            SQL_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            partition_token=partition,
            seqno=sql_count,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )

        self.assertEqual(derived._execute_sql_count, sql_count + 1)
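
Example #11 replaces the hand-rolled _FauxSpannerAPI with a mock returned by self._make_spanner_api(). A plausible sketch of that factory, assuming the proto-based GAPIC client of this library generation (the import path and autospec approach are assumptions, since the page does not show the factory):

    def _make_spanner_api(self):
        import mock
        from google.cloud.spanner_v1.gapic.spanner_client import SpannerClient

        # An autospec mock lets the helper assert on execute_streaming_sql /
        # streaming_read calls with the same signatures as the real client.
        return mock.create_autospec(SpannerClient, instance=True)
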
Example #12
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector, TransactionOptions)
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [
            [u'bharney', 31],
            [u'phred', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        struct_type_pb = StructType(fields=[
            StructType.Field(name='name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        KEYS = [['*****@*****.**'], ['*****@*****.**']]
        keyset = KeySet(keys=KEYS)
        INDEX = 'email-address-index'
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.streaming_read.return_value = _MockIterator(*result_sets)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      partition=partition)
        else:
            result_set = derived.read(TABLE_NAME,
                                      COLUMNS,
                                      keyset,
                                      index=INDEX,
                                      limit=LIMIT)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(read_only=TransactionOptions.ReadOnly(
            strong=True))

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        if partition is not None:
            expected_limit = 0
        else:
            expected_limit = LIMIT

        api.streaming_read.assert_called_once_with(
            self.SESSION_NAME,
            TABLE_NAME,
            COLUMNS,
            keyset._to_pb(),
            transaction=expected_transaction,
            index=INDEX,
            limit=expected_limit,
            partition_token=partition,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )
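
A hedged illustration of the keyset handling above: KeySet(keys=...) serializes each key as a ListValue inside a KeySet proto, which is what streaming_read receives (the key values below are illustrative):

    from google.cloud.spanner_v1.keyset import KeySet

    ks = KeySet(keys=[["key-1"], ["key-2"]])
    key_set_pb = ks._to_pb()
    # key_set_pb.keys is a repeated ListValue; each key component is carried
    # as a string_value, e.g. key_set_pb.keys[0].values[0].string_value == "key-1".
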
Example #13
    def _execute_sql_helper(
        self,
        multi_use,
        first=True,
        count=0,
        partition=None,
        sql_count=0,
        query_options=None,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        retry=google.api_core.gapic_v1.method.DEFAULT,
    ):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            PartialResultSet,
            ResultSetMetadata,
            ResultSetStats,
        )
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionSelector,
            TransactionOptions,
        )
        from google.cloud.spanner_v1.proto.type_pb2 import Type, StructType
        from google.cloud.spanner_v1.proto.type_pb2 import STRING, INT64
        from google.cloud.spanner_v1._helpers import (
            _make_value_pb,
            _merge_query_options,
        )

        VALUES = [[u"bharney", u"rhubbyl", 31], [u"phred", u"phlyntstone", 32]]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(
            fields=[
                StructType.Field(name="first_name", type=Type(code=STRING)),
                StructType.Field(name="last_name", type=Type(code=STRING)),
                StructType.Field(name="age", type=Type(code=INT64)),
            ]
        )
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
        )
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_streaming_sql.return_value = iterator
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        derived._execute_sql_count = sql_count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(
            SQL_QUERY_WITH_PARAM,
            PARAMS,
            PARAM_TYPES,
            query_mode=MODE,
            query_options=query_options,
            partition=partition,
            retry=retry,
            timeout=timeout,
        )

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(strong=True)
        )

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        expected_query_options = database._instance._client._query_options
        if query_options:
            expected_query_options = _merge_query_options(
                expected_query_options, query_options
            )

        api.execute_streaming_sql.assert_called_once_with(
            self.SESSION_NAME,
            SQL_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            query_options=expected_query_options,
            partition_token=partition,
            seqno=sql_count,
            metadata=[("google-cloud-resource-prefix", database.name)],
            timeout=timeout,
            retry=retry,
        )

        self.assertEqual(derived._execute_sql_count, sql_count + 1)

        self.assertSpanAttributes(
            "CloudSpanner.ReadWriteTransaction",
            status=StatusCanonicalCode.OK,
            attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}),
        )
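
Example #13 adds query_options, retry, and timeout plumbing on top of the earlier helpers. A hedged driver showing how a concrete test might exercise the new parameters; the method name and optimizer_version value are assumptions:

    def test_execute_sql_w_query_options(self):
        from google.cloud.spanner_v1.proto.spanner_pb2 import ExecuteSqlRequest

        self._execute_sql_helper(
            multi_use=False,
            query_options=ExecuteSqlRequest.QueryOptions(optimizer_version="3"),
        )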