Example #1
# Test helper: wrap already-encoded Value protobufs in a PartialResultSet message.
def _make_partial_result_set(
        values, metadata=None, stats=None, chunked_value=False):
    from google.cloud.proto.spanner.v1.result_set_pb2 import (
        PartialResultSet)
    return PartialResultSet(
        values=values,
        metadata=metadata,
        stats=stats,
        chunked_value=chunked_value,
    )
Example #2
# Test helper: parse JSON-serialized PartialResultSet messages into protos.
def _generate_partial_result_sets(prs_text_pbs):
    from google.protobuf.json_format import Parse
    from google.cloud.proto.spanner.v1.result_set_pb2 import PartialResultSet

    partial_result_sets = []

    for prs_text_pb in prs_text_pbs:
        prs = PartialResultSet()
        partial_result_sets.append(Parse(prs_text_pb, prs))

    return partial_result_sets
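
Because the helper runs each entry through google.protobuf.json_format.Parse, its inputs are JSON-serialized PartialResultSet messages. A minimal sketch with hypothetical payloads:

PRS_JSON = [
    '{"metadata": {"rowType": {"fields": [{"name": "name", "type": {"code": "STRING"}}]}}, "values": ["phred"]}',
    '{"values": ["bharney"]}',
]
result_sets = _generate_partial_result_sets(PRS_JSON)  # two PartialResultSet protos
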
    # TestCase helper method: ``self``, the SQL_QUERY_WITH_PARAM / PARAMS /
    # PARAM_TYPES / SESSION_NAME constants, and the _Database / _Session /
    # _FauxSpannerAPI / _MockIterator test doubles are defined elsewhere in
    # the test module.
    def _execute_sql_helper(self, multi_use, first=True, count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.proto.spanner.v1.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.proto.spanner.v1.type_pb2 import Type, StructType
        from google.cloud.proto.spanner.v1.type_pb2 import STRING, INT64
        from google.cloud.spanner._helpers import _make_value_pb

        TXN_ID = b'DEADBEEF'
        VALUES = [
            [u'bharney', u'rhubbyl', 31],
            [u'phred', u'phlyntstone', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        MODE = 2  # PROFILE
        TOKEN = b'DEADBEEF'
        struct_type_pb = StructType(fields=[
            StructType.Field(name='first_name', type=Type(code=STRING)),
            StructType.Field(name='last_name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _execute_streaming_sql_response=iterator)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(SQL_QUERY_WITH_PARAM,
                                         PARAMS,
                                         PARAM_TYPES,
                                         query_mode=MODE)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        result_set.consume_all()

        self.assertEqual(list(result_set.rows), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, sql, transaction, params, param_types, resume_token,
         query_mode, options) = api._executed_streaming_sql_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(sql, SQL_QUERY_WITH_PARAM)
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, TXN_ID)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        expected_params = Struct(fields={
            key: _make_value_pb(value)
            for (key, value) in PARAMS.items()
        })
        self.assertEqual(params, expected_params)
        self.assertEqual(param_types, PARAM_TYPES)
        self.assertEqual(query_mode, MODE)
        self.assertEqual(resume_token, b'')
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
    # TestCase helper method: like _execute_sql_helper above, but exercising
    # streaming reads; the TABLE_NAME / COLUMNS constants and the test doubles
    # are defined elsewhere in the test module.
    def _read_helper(self, multi_use, first=True, count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.proto.spanner.v1.result_set_pb2 import (
            PartialResultSet, ResultSetMetadata, ResultSetStats)
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            TransactionSelector)
        from google.cloud.proto.spanner.v1.type_pb2 import Type, StructType
        from google.cloud.proto.spanner.v1.type_pb2 import STRING, INT64
        from google.cloud.spanner.keyset import KeySet
        from google.cloud.spanner._helpers import _make_value_pb

        TXN_ID = b'DEADBEEF'
        VALUES = [
            [u'bharney', 31],
            [u'phred', 32],
        ]
        VALUE_PBS = [[_make_value_pb(item) for item in row] for row in VALUES]
        struct_type_pb = StructType(fields=[
            StructType.Field(name='name', type=Type(code=STRING)),
            StructType.Field(name='age', type=Type(code=INT64)),
        ])
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(query_stats=Struct(
            fields={
                'rows_returned': _make_value_pb(2),
            }))
        result_sets = [
            PartialResultSet(values=VALUE_PBS[0], metadata=metadata_pb),
            PartialResultSet(values=VALUE_PBS[1], stats=stats_pb),
        ]
        KEYS = ['*****@*****.**', '*****@*****.**']
        KEYSET = KeySet(keys=KEYS)
        INDEX = 'email-address-index'
        LIMIT = 20
        TOKEN = b'DEADBEEF'
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _streaming_read_response=_MockIterator(*result_sets))
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.read(TABLE_NAME,
                                  COLUMNS,
                                  KEYSET,
                                  index=INDEX,
                                  limit=LIMIT)

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        result_set.consume_all()

        self.assertEqual(list(result_set.rows), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        (r_session, table, columns, key_set, transaction, index, limit,
         resume_token, options) = api._streaming_read_with

        self.assertEqual(r_session, self.SESSION_NAME)
        self.assertEqual(table, TABLE_NAME)
        self.assertEqual(columns, COLUMNS)
        self.assertEqual(key_set, KEYSET.to_pb())
        self.assertIsInstance(transaction, TransactionSelector)
        if multi_use:
            if first:
                self.assertTrue(transaction.begin.read_only.strong)
            else:
                self.assertEqual(transaction.id, TXN_ID)
        else:
            self.assertTrue(transaction.single_use.read_only.strong)
        self.assertEqual(index, INDEX)
        self.assertEqual(limit, LIMIT)
        self.assertEqual(resume_token, b'')
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
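
A hypothetical driver sketch (test names and argument combinations are illustrative, not taken from this page) showing how the enclosing unittest.TestCase subclass could exercise the three transaction-selector branches the helpers assert on:

    def test_execute_sql_single_use(self):
        self._execute_sql_helper(multi_use=False)

    def test_execute_sql_multi_use_begin(self):
        self._execute_sql_helper(multi_use=True, first=True)

    def test_read_multi_use_existing_txn(self):
        self._read_helper(multi_use=True, first=False, count=1)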