Example #1
    def _consume_next(self):
        """Consume the next partial result set from the stream.

        Parse the result set into new/existing rows in :attr:`_rows`
        """
        response = six.next(self._response_iterator)
        response_pb = PartialResultSet.pb(response)

        if self._metadata is None:  # first response
            metadata = self._metadata = response.metadata

            source = self._source
            if source is not None and source._transaction_id is None:
                source._transaction_id = metadata.transaction.id

        if response_pb.HasField("stats"):  # last response
            self._stats = response.stats

        values = list(response_pb.values)
        if self._pending_chunk is not None:
            values[0] = self._merge_chunk(values[0])

        if response_pb.chunked_value:
            self._pending_chunk = values.pop()

        self._merge_values(values)
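
The helper above relies on the `chunked_value` flag: when it is set, the last value of a response is only a fragment and must be merged with the first value of the next response. A minimal, self-contained sketch of that contract (illustration only, not library code; the string literals are invented):

from google.cloud.spanner_v1 import PartialResultSet

# Two consecutive responses whose string value was split across the stream.
first = PartialResultSet(chunked_value=True)
first.values.extend([u"hello wor"])
second = PartialResultSet()
second.values.extend([u"ld"])

# Access the underlying protobuf messages, as _consume_next() does via .pb().
first_pb = PartialResultSet.pb(first)
second_pb = PartialResultSet.pb(second)

# The pending chunk from the first response is prepended to the first value
# of the second response; this is the string case handled by _merge_chunk().
merged = first_pb.values[-1].string_value + second_pb.values[0].string_value
assert merged == u"hello world"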
Example #2
def _generate_partial_result_sets(prs_text_pbs):
    from google.cloud.spanner_v1 import PartialResultSet

    partial_result_sets = []

    for prs_text_pb in prs_text_pbs:
        prs = PartialResultSet.from_json(prs_text_pb)
        partial_result_sets.append(prs)

    return partial_result_sets
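
A hedged usage sketch: each element of `prs_text_pbs` is expected to be the JSON encoding of a `PartialResultSet`, which the proto-plus `from_json()` classmethod parses (the JSON payloads below are made up for illustration):

prs_text_pbs = [
    '{"values": ["phred", "phlyntstone"]}',
    '{"values": ["bharney", "rhubbyl"]}',
]
result_sets = _generate_partial_result_sets(prs_text_pbs)
assert len(result_sets) == 2
assert len(result_sets[0].values) == 2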
Example #3
    def _make_partial_result_set(
        values, metadata=None, stats=None, chunked_value=False
    ):
        from google.cloud.spanner_v1 import PartialResultSet

        results = PartialResultSet(
            metadata=metadata, stats=stats, chunked_value=chunked_value
        )
        for v in values:
            results.values.append(v)
        return results
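
A possible call, mirroring how the other helpers in this listing populate `values` (proto-plus marshals plain Python strings into the repeated `google.protobuf.Value` field); the arguments are placeholders:

prs = _make_partial_result_set([u"phred", u"phlyntstone"], chunked_value=True)
assert prs.chunked_value
assert len(prs.values) == 2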
Example #4
    def _execute_sql_helper(
        self,
        multi_use,
        first=True,
        count=0,
        partition=None,
        sql_count=0,
        query_options=None,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        retry=google.api_core.gapic_v1.method.DEFAULT,
    ):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1 import (
            PartialResultSet,
            ResultSetMetadata,
            ResultSetStats,
        )
        from google.cloud.spanner_v1 import (
            TransactionSelector,
            TransactionOptions,
        )
        from google.cloud.spanner_v1 import ExecuteSqlRequest
        from google.cloud.spanner_v1 import Type, StructType
        from google.cloud.spanner_v1 import TypeCode
        from google.cloud.spanner_v1._helpers import (
            _make_value_pb,
            _merge_query_options,
        )

        VALUES = [[u"bharney", u"rhubbyl", 31], [u"phred", u"phlyntstone", 32]]
        MODE = 2  # PROFILE
        struct_type_pb = StructType(
            fields=[
                StructType.Field(name="first_name", type_=Type(code=TypeCode.STRING)),
                StructType.Field(name="last_name", type_=Type(code=TypeCode.STRING)),
                StructType.Field(name="age", type_=Type(code=TypeCode.INT64)),
            ]
        )
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
        )
        result_sets = [
            PartialResultSet(metadata=metadata_pb),
            PartialResultSet(stats=stats_pb),
        ]
        for i in range(len(result_sets)):
            result_sets[i].values.extend(VALUES[i])
        iterator = _MockIterator(*result_sets)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_streaming_sql.return_value = iterator
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        derived._execute_sql_count = sql_count
        if not first:
            derived._transaction_id = TXN_ID

        result_set = derived.execute_sql(
            SQL_QUERY_WITH_PARAM,
            PARAMS,
            PARAM_TYPES,
            query_mode=MODE,
            query_options=query_options,
            partition=partition,
            retry=retry,
            timeout=timeout,
        )

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(strong=True)
        )

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        expected_query_options = database._instance._client._query_options
        if query_options:
            expected_query_options = _merge_query_options(
                expected_query_options, query_options
            )

        expected_request = ExecuteSqlRequest(
            session=self.SESSION_NAME,
            sql=SQL_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            query_options=expected_query_options,
            partition_token=partition,
            seqno=sql_count,
        )
        api.execute_streaming_sql.assert_called_once_with(
            request=expected_request,
            metadata=[("google-cloud-resource-prefix", database.name)],
            timeout=timeout,
            retry=retry,
        )

        self.assertEqual(derived._execute_sql_count, sql_count + 1)

        self.assertSpanAttributes(
            "CloudSpanner.ReadWriteTransaction",
            status=StatusCanonicalCode.OK,
            attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}),
        )
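
For reference, a self-contained sketch of the three `TransactionSelector` shapes the helper asserts on, depending on `multi_use` and `first` (the `TXN_ID` bytes below are a stand-in for the id captured from a prior response):

from google.cloud.spanner_v1 import TransactionOptions, TransactionSelector

TXN_ID = b"DEADBEEF"
txn_options = TransactionOptions(
    read_only=TransactionOptions.ReadOnly(strong=True)
)

single_use = TransactionSelector(single_use=txn_options)  # multi_use=False
begin_new = TransactionSelector(begin=txn_options)        # multi_use=True, first request
reuse_existing = TransactionSelector(id=TXN_ID)           # multi_use=True, later requests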
Example #5
    def _read_helper(self, multi_use, first=True, count=0, partition=None):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1 import (
            PartialResultSet,
            ResultSetMetadata,
            ResultSetStats,
        )
        from google.cloud.spanner_v1 import (
            TransactionSelector,
            TransactionOptions,
        )
        from google.cloud.spanner_v1 import ReadRequest
        from google.cloud.spanner_v1 import Type, StructType
        from google.cloud.spanner_v1 import TypeCode
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud.spanner_v1._helpers import _make_value_pb

        VALUES = [[u"bharney", 31], [u"phred", 32]]
        struct_type_pb = StructType(
            fields=[
                StructType.Field(name="name", type_=Type(code=TypeCode.STRING)),
                StructType.Field(name="age", type_=Type(code=TypeCode.INT64)),
            ]
        )
        metadata_pb = ResultSetMetadata(row_type=struct_type_pb)
        stats_pb = ResultSetStats(
            query_stats=Struct(fields={"rows_returned": _make_value_pb(2)})
        )
        result_sets = [
            PartialResultSet(metadata=metadata_pb),
            PartialResultSet(stats=stats_pb),
        ]
        for i in range(len(result_sets)):
            result_sets[i].values.extend(VALUES[i])
        KEYS = [["*****@*****.**"], ["*****@*****.**"]]
        keyset = KeySet(keys=KEYS)
        INDEX = "email-address-index"
        LIMIT = 20
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.streaming_read.return_value = _MockIterator(*result_sets)
        session = _Session(database)
        derived = self._makeDerived(session)
        derived._multi_use = multi_use
        derived._read_request_count = count
        if not first:
            derived._transaction_id = TXN_ID

        if partition is not None:  # 'limit' and 'partition' incompatible
            result_set = derived.read(
                TABLE_NAME, COLUMNS, keyset, index=INDEX, partition=partition
            )
        else:
            result_set = derived.read(
                TABLE_NAME, COLUMNS, keyset, index=INDEX, limit=LIMIT
            )

        self.assertEqual(derived._read_request_count, count + 1)

        if multi_use:
            self.assertIs(result_set._source, derived)
        else:
            self.assertIsNone(result_set._source)

        self.assertEqual(list(result_set), VALUES)
        self.assertEqual(result_set.metadata, metadata_pb)
        self.assertEqual(result_set.stats, stats_pb)

        txn_options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(strong=True)
        )

        if multi_use:
            if first:
                expected_transaction = TransactionSelector(begin=txn_options)
            else:
                expected_transaction = TransactionSelector(id=TXN_ID)
        else:
            expected_transaction = TransactionSelector(single_use=txn_options)

        if partition is not None:
            expected_limit = 0
        else:
            expected_limit = LIMIT

        expected_request = ReadRequest(
            session=self.SESSION_NAME,
            table=TABLE_NAME,
            columns=COLUMNS,
            key_set=keyset._to_pb(),
            transaction=expected_transaction,
            index=INDEX,
            limit=expected_limit,
            partition_token=partition,
        )
        api.streaming_read.assert_called_once_with(
            request=expected_request,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertSpanAttributes(
            "CloudSpanner.ReadOnlyTransaction",
            attributes=dict(
                BASE_ATTRIBUTES, table_id=TABLE_NAME, columns=tuple(COLUMNS)
            ),
        )
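
A sketch of the `ReadRequest` shape the assertion checks when a partition token is supplied: `limit` and `partition` are incompatible, so the helper expects `limit` to be forced to 0 (session, table, columns, keys, and token values below are placeholders):

from google.cloud.spanner_v1 import ReadRequest
from google.cloud.spanner_v1.keyset import KeySet

keyset = KeySet(keys=[["key-1"], ["key-2"]])
request = ReadRequest(
    session="projects/p/instances/i/databases/d/sessions/s",
    table="citizens",
    columns=["email", "age"],
    key_set=keyset._to_pb(),
    index="email-address-index",
    limit=0,  # dropped because a partition token is present
    partition_token=b"token",
)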