def test_found(self):
        from google.cloud.firestore_v1.proto import document_pb2
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.firestore_v1.document import DocumentSnapshot

        # Three distinct protobuf timestamps so each snapshot attribute
        # can be asserted independently.
        moment = datetime.datetime.utcnow()
        step = datetime.timedelta(seconds=100)
        read_time = _datetime_to_pb_timestamp(moment)
        update_time = _datetime_to_pb_timestamp(moment - step)
        create_time = _datetime_to_pb_timestamp(moment - 2 * step)

        ref_string = self._dummy_ref_string()
        fields = {
            "foo": document_pb2.Value(double_value=1.5),
            "bar": document_pb2.Value(string_value=u"skillz"),
        }
        document_pb = document_pb2.Document(
            name=ref_string,
            fields=fields,
            create_time=create_time,
            update_time=update_time,
        )
        response_pb = _make_batch_response(found=document_pb, read_time=read_time)

        reference_map = {ref_string: mock.sentinel.reference}
        snapshot = self._call_fut(response_pb, reference_map)

        # A "found" response yields an existing, fully-populated snapshot.
        self.assertIsInstance(snapshot, DocumentSnapshot)
        self.assertIs(snapshot._reference, mock.sentinel.reference)
        self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
        self.assertTrue(snapshot._exists)
        self.assertEqual(snapshot.read_time, read_time)
        self.assertEqual(snapshot.create_time, create_time)
        self.assertEqual(snapshot.update_time, update_time)
Пример #2
0
    def _make_txn_selector(self):
        """Helper for :meth:`read`.

        Builds the ``TransactionSelector`` protobuf matching this
        object's transaction / staleness configuration.
        """
        # An already-begun transaction is referenced by its id directly.
        if self._transaction_id is not None:
            return TransactionSelector(id=self._transaction_id)

        # Choose the first configured read-only bound, in priority order;
        # fall back to a "strong" read when none is set.
        candidates = (
            ('read_timestamp', self._read_timestamp,
             _datetime_to_pb_timestamp),
            ('min_read_timestamp', self._min_read_timestamp,
             _datetime_to_pb_timestamp),
            ('max_staleness', self._max_staleness,
             _timedelta_to_duration_pb),
            ('exact_staleness', self._exact_staleness,
             _timedelta_to_duration_pb),
        )
        for key, raw, converter in candidates:
            if raw:
                value = converter(raw)
                break
        else:
            key, value = 'strong', True

        options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(**{key: value}))

        # Multi-use snapshots must explicitly begin the transaction.
        if self._multi_use:
            return TransactionSelector(begin=options)
        return TransactionSelector(single_use=options)
def _make_query_response(**kwargs):
    # kwargs supported are ``skipped_results``, ``name`` and ``data``
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import firestore_pb2
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.firestore_v1beta1 import _helpers

    now = datetime.datetime.utcnow()
    kwargs['read_time'] = _datetime_to_pb_timestamp(now)

    name = kwargs.pop('name', None)
    data = kwargs.pop('data', None)
    # A document is embedded only when both a name and data are supplied.
    if name is not None and data is not None:
        delta = datetime.timedelta(seconds=100)
        document_pb = document_pb2.Document(
            name=name,
            fields=_helpers.encode_dict(data),
        )
        # Set the timestamp fields after construction via CopyFrom.
        document_pb.update_time.CopyFrom(
            _datetime_to_pb_timestamp(now - delta))
        document_pb.create_time.CopyFrom(
            _datetime_to_pb_timestamp(now - 2 * delta))
        kwargs['document'] = document_pb

    return firestore_pb2.RunQueryResponse(**kwargs)
Пример #4
0
    def _make_txn_selector(self):
        """Helper for :meth:`read`.

        Returns a ``TransactionSelector`` protobuf reflecting this
        object's transaction / staleness configuration.
        """
        # Re-use an existing transaction when one has already begun.
        if self._transaction_id is not None:
            return TransactionSelector(id=self._transaction_id)

        # Exactly one read-only bound is chosen, in priority order;
        # a "strong" read is the default when nothing is configured.
        if self._read_timestamp:
            key = "read_timestamp"
            value = _datetime_to_pb_timestamp(self._read_timestamp)
        elif self._min_read_timestamp:
            key = "min_read_timestamp"
            value = _datetime_to_pb_timestamp(self._min_read_timestamp)
        elif self._max_staleness:
            key = "max_staleness"
            value = _timedelta_to_duration_pb(self._max_staleness)
        elif self._exact_staleness:
            key = "exact_staleness"
            value = _timedelta_to_duration_pb(self._exact_staleness)
        else:
            key = "strong"
            value = True

        options = TransactionOptions(
            read_only=TransactionOptions.ReadOnly(**{key: value})
        )

        # Multi-use snapshots must explicitly begin the transaction;
        # otherwise a single-use selector suffices.
        if self._multi_use:
            return TransactionSelector(begin=options)
        else:
            return TransactionSelector(single_use=options)
    def test_create_span_default(self):
        from google.cloud.trace_v2.gapic import trace_service_client
        from google.cloud.trace._gapic import _dict_mapping_to_pb
        from google.cloud._helpers import _datetime_to_pb_timestamp

        # Pre-compute the protobuf forms expected on the wire.
        truncatable_name = _str_to_truncatablestr(self.display_name)
        display_name_pb = _dict_mapping_to_pb(
            truncatable_name, 'TruncatableString')
        start_time_pb = _datetime_to_pb_timestamp(self.start_time)
        end_time_pb = _datetime_to_pb_timestamp(self.end_time)

        gapic_api = mock.Mock(spec=trace_service_client.TraceServiceClient)
        api = self._make_one(gapic_api, None)
        api.create_span(
            name=self.span_name,
            span_id=self.span_id,
            display_name=truncatable_name,
            start_time=self.start_time,
            end_time=self.end_time)

        # Every optional field not supplied should be forwarded as None.
        gapic_api.create_span.assert_called_with(
            name=self.span_name,
            span_id=self.span_id,
            display_name=display_name_pb,
            start_time=start_time_pb,
            end_time=end_time_pb,
            parent_span_id=None,
            attributes=None,
            stack_trace=None,
            time_events=None,
            links=None,
            status=None,
            same_process_as_parent_span=None,
            child_span_count=None)
Пример #6
0
def _make_query_response(**kwargs):
    """Build a ``RunQueryResponse`` protobuf for use in tests.

    Supported ``kwargs`` are ``skipped_results``, ``name`` and ``data``;
    when both ``name`` and ``data`` are given, an embedded document is
    attached to the response.
    """
    # kwargs supported are ``skipped_results``, ``name`` and ``data``
    from google.cloud.firestore_v1beta1.proto import document_pb2
    from google.cloud.firestore_v1beta1.proto import firestore_pb2
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.firestore_v1beta1 import _helpers

    now = datetime.datetime.utcnow()
    read_time = _datetime_to_pb_timestamp(now)
    kwargs['read_time'] = read_time

    name = kwargs.pop('name', None)
    data = kwargs.pop('data', None)
    # A document is only embedded when both a name and data are supplied.
    if name is not None and data is not None:
        document_pb = document_pb2.Document(
            name=name,
            fields=_helpers.encode_dict(data),
        )
        # Backdate update/create times relative to the read time so the
        # three timestamps are distinct.
        delta = datetime.timedelta(seconds=100)
        update_time = _datetime_to_pb_timestamp(now - delta)
        create_time = _datetime_to_pb_timestamp(now - 2 * delta)
        document_pb.update_time.CopyFrom(update_time)
        document_pb.create_time.CopyFrom(create_time)

        kwargs['document'] = document_pb

    return firestore_pb2.RunQueryResponse(**kwargs)
Пример #7
0
    def test_create_span_default(self):
        """Omitted optional span fields are forwarded as ``None``."""
        from google.cloud.trace_v2.gapic import trace_service_client
        from google.cloud.trace._gapic import _dict_mapping_to_pb
        from google.cloud._helpers import _datetime_to_pb_timestamp

        gapic_api = mock.Mock(spec=trace_service_client.TraceServiceClient)
        api = self._make_one(gapic_api, None)
        api.create_span(
            name=self.span_name,
            span_id=self.span_id,
            display_name=_str_to_truncatablestr(self.display_name),
            start_time=self.start_time,
            end_time=self.end_time,
        )

        # Protobuf forms the wrapper is expected to have produced.
        display_name_pb = _dict_mapping_to_pb(
            _str_to_truncatablestr(self.display_name), "TruncatableString")
        start_time_pb = _datetime_to_pb_timestamp(self.start_time)
        end_time_pb = _datetime_to_pb_timestamp(self.end_time)

        gapic_api.create_span.assert_called_with(
            name=self.span_name,
            span_id=self.span_id,
            display_name=display_name_pb,
            start_time=start_time_pb,
            end_time=end_time_pb,
            parent_span_id=None,
            attributes=None,
            stack_trace=None,
            time_events=None,
            links=None,
            status=None,
            same_process_as_parent_span=None,
            child_span_count=None,
        )
Пример #8
0
    def test_patch_traces(self):
        """``patch_traces`` converts the mapping to protobuf and forwards it."""
        from google.cloud.trace_v1.gapic import trace_service_client
        from google.cloud.trace_v1.proto.trace_pb2 import (TraceSpan, Trace,
                                                           Traces)
        from google.cloud.trace.v1._gapic import _traces_mapping_to_pb
        from google.cloud._helpers import _datetime_to_pb_timestamp

        trace_id = 'test_trace_id'
        span_id = 1234
        span_name = 'test_span_name'
        start_time = datetime.datetime.utcnow()
        end_time = datetime.datetime.utcnow()

        # REST-style camelCase mapping; timestamps are ISO strings with a
        # "Z" suffix marking UTC.
        traces = {
            'traces': [
                {
                    'projectId':
                    self.project,
                    'traceId':
                    trace_id,
                    'spans': [
                        {
                            'spanId': span_id,
                            'name': span_name,
                            'startTime': start_time.isoformat() + 'Z',
                            'endTime': end_time.isoformat() + 'Z',
                        },
                    ],
                },
            ],
        }

        # Expected protobuf equivalent of the mapping above.
        traces_pb = _traces_mapping_to_pb(traces)

        gapic_api = mock.Mock(spec=trace_service_client.TraceServiceClient)
        _, api = self._make_one(gapic_api, None)
        api.patch_traces(project_id=self.project, traces=traces)

        gapic_api.patch_traces.assert_called_with(self.project, traces_pb)

        # Inspect the positional arguments actually passed to GAPIC.
        call_args = gapic_api.patch_traces.call_args[0]
        self.assertEqual(len(call_args), 2)
        traces_called = call_args[1]
        self.assertEqual(len(traces_called.traces), 1)
        trace = traces_called.traces[0]

        self.assertEqual(len(trace.spans), 1)
        span = trace.spans[0]

        # Field-by-field checks: camelCase keys map to snake_case fields.
        self.assertIsInstance(traces_called, Traces)
        self.assertEqual(trace.project_id, self.project)
        self.assertEqual(trace.trace_id, trace_id)
        self.assertIsInstance(trace, Trace)

        self.assertEqual(span.span_id, span_id)
        self.assertEqual(span.name, span_name)
        self.assertEqual(span.start_time,
                         _datetime_to_pb_timestamp(start_time))
        self.assertEqual(span.end_time, _datetime_to_pb_timestamp(end_time))
        self.assertIsInstance(span, TraceSpan)
Пример #9
0
    def test_found(self):
        """A "found" batch response yields a populated, existing snapshot."""
        from google.cloud.firestore_v1.proto import document_pb2
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.firestore_v1.document import DocumentSnapshot

        # Three distinct timestamps so each snapshot attribute can be
        # asserted independently.
        now = datetime.datetime.utcnow()
        read_time = _datetime_to_pb_timestamp(now)
        delta = datetime.timedelta(seconds=100)
        update_time = _datetime_to_pb_timestamp(now - delta)
        create_time = _datetime_to_pb_timestamp(now - 2 * delta)

        ref_string = self._dummy_ref_string()
        document_pb = document_pb2.Document(
            name=ref_string,
            fields={
                "foo": document_pb2.Value(double_value=1.5),
                "bar": document_pb2.Value(string_value=u"skillz"),
            },
            create_time=create_time,
            update_time=update_time,
        )
        response_pb = _make_batch_response(found=document_pb,
                                           read_time=read_time)

        reference_map = {ref_string: mock.sentinel.reference}
        snapshot = self._call_fut(response_pb, reference_map)
        self.assertIsInstance(snapshot, DocumentSnapshot)
        # The reference is resolved via the map keyed by document name.
        self.assertIs(snapshot._reference, mock.sentinel.reference)
        self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
        self.assertTrue(snapshot._exists)
        self.assertEqual(snapshot.read_time, read_time)
        self.assertEqual(snapshot.create_time, create_time)
        self.assertEqual(snapshot.update_time, update_time)
Пример #10
0
async def test_document_delete(client, cleanup):
    """System test: ``delete()`` precondition options and idempotency.

    Exercises ``last_update_time`` preconditions that cannot match (one
    in the past, one in the future), then deletes the document twice to
    confirm deleting an absent document still succeeds.
    """
    document_id = "deleted" + UNIQUE_RESOURCE_ID
    document = client.document("here-to-be", document_id)
    # Add to clean-up before API request (in case ``create()`` fails).
    cleanup(document.delete)
    await document.create({"not": "much"})

    # 1. Call ``delete()`` with invalid (in the past) "last timestamp" option.
    snapshot1 = await document.get()
    timestamp_pb = _datetime_to_pb_timestamp(snapshot1.update_time)
    # Move the precondition an hour *before* the real update time so it
    # cannot match. (The previous ``+= 3600`` contradicted the "in the
    # past" case above and duplicated case 2 below.)
    timestamp_pb.seconds -= 3600

    option1 = client.write_option(last_update_time=timestamp_pb)
    # TODO(microgen):invalid argument thrown after microgen.
    # with pytest.raises(FailedPrecondition):
    with pytest.raises(InvalidArgument):
        await document.delete(option=option1)

    # 2. Call ``delete()`` with invalid (in future) "last timestamp" option.
    timestamp_pb = _datetime_to_pb_timestamp(snapshot1.update_time)
    timestamp_pb.seconds += 3600

    option2 = client.write_option(last_update_time=timestamp_pb)
    # TODO(microgen):invalid argument thrown after microgen.
    # with pytest.raises(FailedPrecondition):
    with pytest.raises(InvalidArgument):
        await document.delete(option=option2)

    # 3. Actually ``delete()`` the document.
    delete_time3 = await document.delete()

    # 4. ``delete()`` again, even though we know the document is gone.
    delete_time4 = await document.delete()
    assert_timestamp_less(delete_time3, delete_time4)
    def test_create_span_explicit(self):
        """Every explicitly-supplied span field is converted to protobuf
        form and forwarded to the GAPIC client."""
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.trace._gapic import (
            _dict_mapping_to_pb,
            _span_attrs_to_pb,
            _status_mapping_to_pb,
            _value_to_pb,
        )
        from google.cloud.trace_v2.gapic import trace_service_client

        gapic_api = mock.Mock(spec=trace_service_client.TraceServiceClient)
        api = self._make_one(gapic_api, None)
        api.create_span(
            name=self.span_name,
            span_id=self.span_id,
            display_name=_str_to_truncatablestr(self.display_name),
            start_time=self.start_time,
            end_time=self.end_time,
            parent_span_id=self.parent_span_id,
            attributes=self.attributes,
            stack_trace=self.stack_trace,
            time_events=self.time_events,
            links=self.links,
            status=self.status,
            same_process_as_parent_span=self.same_process_as_parent_span,
            child_span_count=self.child_span_count,
        )

        # Expected protobuf form of each argument.
        display_name_pb = _dict_mapping_to_pb(
            _str_to_truncatablestr(self.display_name), "TruncatableString"
        )
        start_time_pb = _datetime_to_pb_timestamp(self.start_time)
        end_time_pb = _datetime_to_pb_timestamp(self.end_time)
        attributes_pb = _span_attrs_to_pb(self.attributes, "Attributes")
        stack_trace_pb = _dict_mapping_to_pb(self.stack_trace, "StackTrace")
        time_events_pb = _span_attrs_to_pb(self.time_events, "TimeEvents")
        links_pb = _span_attrs_to_pb(self.links, "Links")
        status_pb = _status_mapping_to_pb(self.status)
        same_process_as_parent_span_pb = _value_to_pb(
            self.same_process_as_parent_span, "BoolValue"
        )
        child_span_count_pb = _value_to_pb(self.child_span_count, "Int32Value")

        gapic_api.create_span.assert_called_with(
            name=self.span_name,
            span_id=self.span_id,
            display_name=display_name_pb,
            start_time=start_time_pb,
            end_time=end_time_pb,
            parent_span_id=self.parent_span_id,
            attributes=attributes_pb,
            stack_trace=stack_trace_pb,
            time_events=time_events_pb,
            links=links_pb,
            status=status_pb,
            same_process_as_parent_span=same_process_as_parent_span_pb,
            child_span_count=child_span_count_pb,
        )
    def test_create_span_explicit(self):
        """Check that all supplied span fields reach the GAPIC client in
        their protobuf representations."""
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.trace._gapic import (
            _dict_mapping_to_pb,
            _span_attrs_to_pb,
            _status_mapping_to_pb,
            _value_to_pb,
        )
        from google.cloud.trace_v2.gapic import trace_service_client

        gapic_api = mock.Mock(spec=trace_service_client.TraceServiceClient)
        api = self._make_one(gapic_api, None)
        api.create_span(
            name=self.span_name,
            span_id=self.span_id,
            display_name=_str_to_truncatablestr(self.display_name),
            start_time=self.start_time,
            end_time=self.end_time,
            parent_span_id=self.parent_span_id,
            attributes=self.attributes,
            stack_trace=self.stack_trace,
            time_events=self.time_events,
            links=self.links,
            status=self.status,
            same_process_as_parent_span=self.same_process_as_parent_span,
            child_span_count=self.child_span_count,
        )

        # Expected protobuf form of each forwarded argument.
        display_name_pb = _dict_mapping_to_pb(
            _str_to_truncatablestr(self.display_name), "TruncatableString"
        )
        start_time_pb = _datetime_to_pb_timestamp(self.start_time)
        end_time_pb = _datetime_to_pb_timestamp(self.end_time)
        attributes_pb = _span_attrs_to_pb(self.attributes, "Attributes")
        stack_trace_pb = _dict_mapping_to_pb(self.stack_trace, "StackTrace")
        time_events_pb = _span_attrs_to_pb(self.time_events, "TimeEvents")
        links_pb = _span_attrs_to_pb(self.links, "Links")
        status_pb = _status_mapping_to_pb(self.status)
        same_process_as_parent_span_pb = _value_to_pb(
            self.same_process_as_parent_span, "BoolValue"
        )
        child_span_count_pb = _value_to_pb(self.child_span_count, "Int32Value")

        gapic_api.create_span.assert_called_with(
            name=self.span_name,
            span_id=self.span_id,
            display_name=display_name_pb,
            start_time=start_time_pb,
            end_time=end_time_pb,
            parent_span_id=self.parent_span_id,
            attributes=attributes_pb,
            stack_trace=stack_trace_pb,
            time_events=time_events_pb,
            links=links_pb,
            status=status_pb,
            same_process_as_parent_span=same_process_as_parent_span_pb,
            child_span_count=child_span_count_pb,
        )
Пример #13
0
    def test_patch_traces(self):
        """``patch_traces`` converts the trace mapping to protobuf and
        forwards it to the GAPIC client."""
        from google.cloud.trace_v1.gapic import trace_service_client
        from google.cloud.trace_v1.proto.trace_pb2 import TraceSpan, Trace, Traces
        from google.cloud.trace.v1._gapic import _traces_mapping_to_pb
        from google.cloud._helpers import _datetime_to_pb_timestamp

        trace_id = "test_trace_id"
        span_id = 1234
        span_name = "test_span_name"
        start_time = datetime.datetime.utcnow()
        end_time = datetime.datetime.utcnow()

        # REST-style camelCase mapping; timestamps are ISO strings with a
        # "Z" suffix marking UTC.
        traces = {
            "traces": [
                {
                    "projectId": self.project,
                    "traceId": trace_id,
                    "spans": [
                        {
                            "spanId": span_id,
                            "name": span_name,
                            "startTime": start_time.isoformat() + "Z",
                            "endTime": end_time.isoformat() + "Z",
                        }
                    ],
                }
            ]
        }

        # Expected protobuf equivalent of the mapping above.
        traces_pb = _traces_mapping_to_pb(traces)

        gapic_api = mock.Mock(spec=trace_service_client.TraceServiceClient)
        _, api = self._make_one(gapic_api, None)
        api.patch_traces(project_id=self.project, traces=traces)

        gapic_api.patch_traces.assert_called_with(self.project, traces_pb)

        # Inspect the positional arguments actually passed to GAPIC.
        call_args = gapic_api.patch_traces.call_args[0]
        self.assertEqual(len(call_args), 2)
        traces_called = call_args[1]
        self.assertEqual(len(traces_called.traces), 1)
        trace = traces_called.traces[0]

        self.assertEqual(len(trace.spans), 1)
        span = trace.spans[0]

        # Field-by-field checks: camelCase keys map to snake_case fields.
        self.assertIsInstance(traces_called, Traces)
        self.assertEqual(trace.project_id, self.project)
        self.assertEqual(trace.trace_id, trace_id)
        self.assertIsInstance(trace, Trace)

        self.assertEqual(span.span_id, span_id)
        self.assertEqual(span.name, span_name)
        self.assertEqual(span.start_time, _datetime_to_pb_timestamp(start_time))
        self.assertEqual(span.end_time, _datetime_to_pb_timestamp(end_time))
        self.assertIsInstance(span, TraceSpan)
    def test_list_traces_explicit(self):
        """``list_traces`` forwards all options, converting datetimes to
        protobuf ``Timestamp``\\ s for the GAX layer."""
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.gapic.trace.v1.enums import ListTracesRequest as Enum
        from google.cloud.trace._gax import _TraceAPI

        from datetime import datetime

        # Stub that records and returns the arguments it receives, so the
        # test can inspect exactly what the client forwarded.
        def list_traces(project_id,
                        view=None,
                        page_size=None,
                        start_time=None,
                        end_time=None,
                        filter_=None,
                        order_by=None,
                        page_token=None):
            _list_traces_called_with = (project_id, view, page_size,
                                        start_time, end_time, filter_,
                                        order_by, page_token)
            return _list_traces_called_with

        credentials = _make_credentials()
        client = self._make_one(project=self.project, credentials=credentials)

        mock_trace_api = mock.Mock(spec=_TraceAPI)
        mock_trace_api.list_traces = list_traces
        patch = mock.patch('google.cloud.trace.client.make_gax_trace_api',
                           return_value=mock_trace_api)

        view = Enum.ViewType.COMPLETE
        page_size = 10
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()
        filter_ = '+span:span1'
        order_by = 'traceId'
        page_token = 'TOKEN'

        with patch:
            list_traces_called_with = client.list_traces(
                project_id=self.project,
                view=view,
                page_size=page_size,
                start_time=start_time,
                end_time=end_time,
                filter_=filter_,
                order_by=order_by,
                page_token=page_token)

        # The datetimes must arrive at the API layer already converted.
        self.assertEqual(list_traces_called_with,
                         (self.project, view, page_size,
                          _datetime_to_pb_timestamp(start_time),
                          _datetime_to_pb_timestamp(end_time), filter_,
                          order_by, page_token))
Пример #15
0
def encode_value(value):
    """Converts a native Python value into a Firestore protobuf ``Value``.

    The ``isinstance`` checks below are order-sensitive: ``bool`` must be
    tested before integers, and document references are detected by
    attribute (duck typing) rather than by type.

    Args:
        value (Union[NoneType, bool, int, float, datetime.datetime, \
            str, bytes, dict, ~google.cloud.Firestore.GeoPoint]): A native
            Python value to convert to a protobuf field.

    Returns:
        ~google.cloud.firestore_v1beta1.types.Value: A
        value encoded as a Firestore protobuf.

    Raises:
        TypeError: If the ``value`` is not one of the accepted types.
    """
    if value is None:
        return document_pb2.Value(null_value=struct_pb2.NULL_VALUE)

    # Must come before six.integer_types since ``bool`` is an integer subtype.
    if isinstance(value, bool):
        return document_pb2.Value(boolean_value=value)

    if isinstance(value, six.integer_types):
        return document_pb2.Value(integer_value=value)

    if isinstance(value, float):
        return document_pb2.Value(double_value=value)

    # datetimes are stored as protobuf Timestamps.
    if isinstance(value, datetime.datetime):
        return document_pb2.Value(
            timestamp_value=_datetime_to_pb_timestamp(value))

    if isinstance(value, six.text_type):
        return document_pb2.Value(string_value=value)

    if isinstance(value, six.binary_type):
        return document_pb2.Value(bytes_value=value)

    # NOTE: We avoid doing an isinstance() check for a Document
    #       here to avoid import cycles.
    document_path = getattr(value, '_document_path', None)
    if document_path is not None:
        return document_pb2.Value(reference_value=document_path)

    if isinstance(value, GeoPoint):
        return document_pb2.Value(geo_point_value=value.to_protobuf())

    # Containers are encoded recursively.
    if isinstance(value, list):
        value_list = [encode_value(element) for element in value]
        value_pb = document_pb2.ArrayValue(values=value_list)
        return document_pb2.Value(array_value=value_pb)

    if isinstance(value, dict):
        value_dict = encode_dict(value)
        value_pb = document_pb2.MapValue(fields=value_dict)
        return document_pb2.Value(map_value=value_pb)

    raise TypeError(
        'Cannot convert to a Firestore Value', value,
        'Invalid type', type(value))
    def _instance_api_response_for_update(self):
        """Build a mocked instance-admin API for update tests.

        Returns:
            Tuple[mock.Mock, google.api_core.operation.Operation]: An
            autospecced API mock whose ``partial_update_instance`` returns
            a long-running operation carrying ``UpdateInstanceMetadata``,
            plus that operation object.
        """
        import datetime
        from google.api_core import operation
        from google.longrunning import operations_pb2
        from google.protobuf.any_pb2 import Any
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
        from google.cloud.bigtable_admin_v2.proto import (
            bigtable_instance_admin_pb2 as messages_v2_pb2,
        )
        from google.cloud.bigtable_admin_v2.types import instance_pb2

        NOW = datetime.datetime.utcnow()
        NOW_PB = _datetime_to_pb_timestamp(NOW)
        # Pack the metadata into an ``Any``, as the operations API does.
        metadata = messages_v2_pb2.UpdateInstanceMetadata(request_time=NOW_PB)
        type_url = "type.googleapis.com/{}".format(
            messages_v2_pb2.UpdateInstanceMetadata.DESCRIPTOR.full_name
        )
        response_pb = operations_pb2.Operation(
            name=self.OP_NAME,
            metadata=Any(type_url=type_url, value=metadata.SerializeToString()),
        )
        response = operation.from_gapic(
            response_pb,
            mock.Mock(),
            instance_pb2.Instance,
            metadata_type=messages_v2_pb2.UpdateInstanceMetadata,
        )
        instance_path_template = "projects/{project}/instances/{instance}"
        instance_api = mock.create_autospec(
            bigtable_instance_admin_client.BigtableInstanceAdminClient
        )
        instance_api.partial_update_instance.return_value = response
        # ``instance_path`` just formats the template, like the real client.
        instance_api.instance_path = instance_path_template.format
        return instance_api, response
    def test_run_in_transaction_w_args_w_kwargs_wo_abort(self):
        """``run_in_transaction`` passes extra args/kwargs to the unit of
        work, commits once, and returns the unit of work's result."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB, TransactionOptions)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner_v1.transaction import Transaction

        TABLE_NAME = 'citizens'
        COLUMNS = ['email', 'first_name', 'last_name', 'age']
        VALUES = [
            ['*****@*****.**', 'Phred', 'Phlyntstone', 32],
            ['*****@*****.**', 'Bharney', 'Rhubble', 31],
        ]
        TRANSACTION_ID = b'FACEDACE'
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Wire the mocked API: begin yields the transaction, commit succeeds.
        gax_api = self._make_spanner_api()
        gax_api.begin_transaction.return_value = transaction_pb
        gax_api.commit.return_value = response
        database = self._make_database()
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = self.SESSION_ID

        called_with = []

        # Unit of work records its arguments and stages an insert.
        def unit_of_work(txn, *args, **kw):
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)
            return 42

        return_value = session.run_in_transaction(
            unit_of_work, 'abc', some_arg='def')

        self.assertIsNone(session._transaction)
        self.assertEqual(len(called_with), 1)
        txn, args, kw = called_with[0]
        self.assertIsInstance(txn, Transaction)
        self.assertEqual(return_value, 42)
        self.assertEqual(args, ('abc',))
        self.assertEqual(kw, {'some_arg': 'def'})

        # Read-write options must be used, and the commit must carry the
        # staged mutations plus the resource-prefix metadata.
        expected_options = TransactionOptions(
            read_write=TransactionOptions.ReadWrite(),
        )
        gax_api.begin_transaction.assert_called_once_with(
            self.SESSION_NAME,
            expected_options,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )
        gax_api.commit.assert_called_once_with(
            self.SESSION_NAME,
            txn._mutations,
            transaction_id=TRANSACTION_ID,
            metadata=[('google-cloud-resource-prefix', database.name)],
        )
Пример #18
0
    def test_run_in_transaction_w_abort_w_retry_metadata_deadline(self):
        """An aborted commit whose retry delay exceeds ``timeout_secs``
        re-raises the abort instead of sleeping and retrying."""
        import datetime
        from google.gax.errors import GaxError
        from google.gax.grpc import exc_to_code
        from grpc import StatusCode
        from google.cloud.proto.spanner.v1.spanner_pb2 import CommitResponse
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner.transaction import Transaction
        from google.cloud.spanner import session as MUT
        from google.cloud._testing import _Monkey

        TABLE_NAME = 'citizens'
        COLUMNS = ['email', 'first_name', 'last_name', 'age']
        VALUES = [
            ['*****@*****.**', 'Phred', 'Phlyntstone', 32],
            ['*****@*****.**', 'Bharney', 'Rhubble', 31],
        ]
        TRANSACTION_ID = b'FACEDACE'
        RETRY_SECONDS = 1
        RETRY_NANOS = 3456
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Fake API: first commit aborts with server-suggested retry delay
        # (1s+), which exceeds the 0.01s timeout used below.
        gax_api = _SpannerApi(
            _begin_transaction_response=transaction_pb,
            _commit_abort_count=1,
            _commit_abort_retry_seconds=RETRY_SECONDS,
            _commit_abort_retry_nanos=RETRY_NANOS,
            _commit_response=response,
        )
        database = _Database(self.DATABASE_NAME)
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = 'DEADBEEF'

        called_with = []

        def unit_of_work(txn, *args, **kw):
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)

        # Patch time so any (unexpected) sleep would be recorded.
        time_module = _FauxTimeModule()

        with _Monkey(MUT, time=time_module):
            with self.assertRaises(GaxError) as exc:
                session.run_in_transaction(
                    unit_of_work, 'abc', some_arg='def', timeout_secs=0.01)

        self.assertEqual(exc_to_code(exc.exception.cause), StatusCode.ABORTED)
        # No sleep should occur and no retry: one call, no commit.
        self.assertIsNone(time_module._slept)
        self.assertEqual(len(called_with), 1)
        txn, args, kw = called_with[0]
        self.assertIsInstance(txn, Transaction)
        self.assertIsNone(txn.committed)
        self.assertEqual(args, ('abc',))
        self.assertEqual(kw, {'some_arg': 'def'})
Пример #19
0
    def test_context_mgr_success(self):
        """Exiting the ``with`` block cleanly commits the batch."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionOptions)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(_commit_response=response)
        session = _Session(database)
        batch = self._make_one(session)

        with batch:
            batch.insert(TABLE_NAME, COLUMNS, VALUES)

        # The commit timestamp from the response is surfaced on the batch.
        self.assertEqual(batch.committed, now)

        # Verify what the fake API recorded: a single-use read-write
        # transaction carrying the batch's mutations.
        (session, mutations, single_use_txn, options) = api._committed
        self.assertEqual(session, self.SESSION_NAME)
        self.assertEqual(mutations, batch._mutations)
        self.assertIsInstance(single_use_txn, TransactionOptions)
        self.assertTrue(single_use_txn.HasField('read_write'))
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
Пример #20
0
def test_backup_create_success():
    """A successful ``Backup.create`` returns the operation future and issues
    exactly one CreateBackup request for the expected backup proto."""
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.types import table
    from google.cloud.bigtable import Client

    operation_future = object()
    client = Client(
        project=PROJECT_ID, credentials=_make_credentials(), admin=True)
    api = client._table_admin_client = _make_table_admin_client()
    api.create_backup.return_value = operation_future

    expire_time = _make_timestamp()
    instance = _Instance(INSTANCE_NAME, client=client)
    backup = _make_backup(
        BACKUP_ID, instance, table_id=TABLE_ID, expire_time=expire_time)

    expected_backup_pb = table.Backup(
        source_table=TABLE_NAME,
        expire_time=_datetime_to_pb_timestamp(expire_time),
    )

    future = backup.create(CLUSTER_ID)

    assert backup._cluster == CLUSTER_ID
    assert future is operation_future
    api.create_backup.assert_called_once_with(
        request={
            "parent": CLUSTER_NAME,
            "backup_id": BACKUP_ID,
            "backup": expected_backup_pb,
        }
    )
Пример #21
0
def test_backup_reload():
    """``Backup.reload`` should copy every field of the fetched backup proto
    onto the local ``Backup`` instance."""
    from google.cloud.bigtable_admin_v2.types import table
    from google.cloud._helpers import _datetime_to_pb_timestamp

    when = _datetime_to_pb_timestamp(_make_timestamp())
    client = _Client()
    api = client.table_admin_client = _make_table_admin_client()
    api.get_backup.return_value = table.Backup(
        name=BACKUP_NAME,
        source_table=TABLE_NAME,
        expire_time=when,
        start_time=when,
        end_time=when,
        size_bytes=0,
        state=table.Backup.State.READY,
    )

    instance = _Instance(INSTANCE_NAME, client=client)
    backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID)
    backup.reload()

    assert backup._source_table == TABLE_NAME
    assert backup._expire_time == when
    assert backup._start_time == when
    assert backup._end_time == when
    assert backup._size_bytes == 0
    assert backup._state == table.Backup.State.READY
Пример #22
0
def test_backup_update_expire_time_w_grpc_error():
    """A gRPC ``Unknown`` error from UpdateBackup should propagate to the
    caller after exactly one well-formed update request."""
    from google.api_core.exceptions import Unknown
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.types import table
    from google.protobuf import field_mask_pb2

    client = _Client()
    api = client.table_admin_client = _make_table_admin_client()
    api.update_backup.side_effect = Unknown("testing")
    instance = _Instance(INSTANCE_NAME, client=client)
    backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID)
    new_expire_time = _make_timestamp()

    with pytest.raises(Unknown):
        backup.update_expire_time(new_expire_time)

    expected_backup = table.Backup(
        name=BACKUP_NAME,
        expire_time=_datetime_to_pb_timestamp(new_expire_time),
    )
    expected_mask = field_mask_pb2.FieldMask(paths=["expire_time"])
    api.update_backup.assert_called_once_with(
        request={"backup": expected_backup, "update_mask": expected_mask}
    )
Пример #23
0
def test_backup_create_w_already_exists():
    """CreateBackup raising ``Conflict`` should bubble out of ``create``
    after a single request is attempted."""
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.types import table
    from google.cloud.exceptions import Conflict

    client = _Client()
    api = client.table_admin_client = _make_table_admin_client()
    api.create_backup.side_effect = Conflict("testing")

    expire_time = _make_timestamp()
    backup = _make_backup(
        BACKUP_ID,
        _Instance(INSTANCE_NAME, client=client),
        table_id=TABLE_ID,
        expire_time=expire_time,
    )

    with pytest.raises(Conflict):
        backup.create(CLUSTER_ID)

    expected_backup_pb = table.Backup(
        source_table=TABLE_NAME,
        expire_time=_datetime_to_pb_timestamp(expire_time),
    )
    api.create_backup.assert_called_once_with(
        request={
            "parent": CLUSTER_NAME,
            "backup_id": BACKUP_ID,
            "backup": expected_backup_pb,
        }
    )
Пример #24
0
    def test_create_success(self):
        """A successful ``create`` returns the operation future and sends a
        single CreateBackup request for the expected backup proto."""
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.types import table
        from google.cloud.bigtable import Client

        operation_future = object()
        client = Client(
            project=self.PROJECT_ID,
            credentials=_make_credentials(),
            admin=True,
        )
        api = client._table_admin_client = self._make_table_admin_client()
        api.create_backup.return_value = operation_future

        expire_time = self._make_timestamp()
        backup = self._make_one(
            self.BACKUP_ID,
            _Instance(self.INSTANCE_NAME, client=client),
            table_id=self.TABLE_ID,
            expire_time=expire_time,
        )

        expected_backup_pb = table.Backup(
            source_table=self.TABLE_NAME,
            expire_time=_datetime_to_pb_timestamp(expire_time),
        )

        future = backup.create(self.CLUSTER_ID)

        self.assertEqual(backup._cluster, self.CLUSTER_ID)
        self.assertIs(future, operation_future)
        api.create_backup.assert_called_once_with(
            request={
                "parent": self.CLUSTER_NAME,
                "backup_id": self.BACKUP_ID,
                "backup": expected_backup_pb,
            }
        )
    def test_context_mgr_success(self):
        """Exiting ``with transaction:`` cleanly should commit.

        The faux API records the commit; verify the session, transaction id,
        mutations, and resource-prefix metadata it received.
        """
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        transaction_pb = TransactionPB(id=self.TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Build the database double exactly once; the original created a
        # second, immediately-discarded _Database() here.
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _begin_transaction_response=transaction_pb,
            _commit_response=response)
        session = _Session(database)
        transaction = self._make_one(session)

        with transaction:
            transaction.insert(TABLE_NAME, COLUMNS, VALUES)

        self.assertEqual(transaction.committed, now)

        session_id, mutations, txn_id, metadata = api._committed
        self.assertEqual(session_id, self.SESSION_NAME)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(
            metadata, [('google-cloud-resource-prefix', database.name)])
Пример #26
0
    def test_reload(self):
        """``reload`` should copy every field of the fetched backup proto
        onto the local instance."""
        from google.cloud.bigtable_admin_v2.types import table
        from google.cloud._helpers import _datetime_to_pb_timestamp

        when = _datetime_to_pb_timestamp(self._make_timestamp())
        client = _Client()
        api = client.table_admin_client = self._make_table_admin_client()
        api.get_backup.return_value = table.Backup(
            name=self.BACKUP_NAME,
            source_table=self.TABLE_NAME,
            expire_time=when,
            start_time=when,
            end_time=when,
            size_bytes=0,
            state=table.Backup.State.READY,
        )

        backup = self._make_one(
            self.BACKUP_ID,
            _Instance(self.INSTANCE_NAME, client=client),
            cluster_id=self.CLUSTER_ID,
        )
        backup.reload()

        self.assertEqual(backup._source_table, self.TABLE_NAME)
        self.assertEqual(backup._expire_time, when)
        self.assertEqual(backup._start_time, when)
        self.assertEqual(backup._end_time, when)
        self.assertEqual(backup._size_bytes, 0)
        self.assertEqual(backup._state, table.Backup.State.READY)
Пример #27
0
    def test_context_mgr_failure(self):
        """An exception inside ``with batch:`` must abort: nothing is
        committed and the staged mutation is retained."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        commit_time = datetime.datetime.utcnow().replace(tzinfo=UTC)
        commit_response = CommitResponse(
            commit_timestamp=_datetime_to_pb_timestamp(commit_time))
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _commit_response=commit_response)
        batch = self._make_one(_Session(database))

        class _BailOut(Exception):
            pass

        with self.assertRaises(_BailOut):
            with batch:
                batch.insert(TABLE_NAME, COLUMNS, VALUES)
                raise _BailOut()

        self.assertEqual(batch.committed, None)
        self.assertEqual(api._committed, None)
        self.assertEqual(len(batch._mutations), 1)
    def test_commit_ok(self):
        """``commit`` on a transaction with a staged delete should record the
        commit timestamp and detach the transaction from its session."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.keyset import KeySet
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        commit_time = datetime.datetime.utcnow().replace(tzinfo=UTC)
        commit_response = CommitResponse(
            commit_timestamp=_datetime_to_pb_timestamp(commit_time))
        keyset = KeySet(keys=[[0], [1], [2]])
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _commit_response=commit_response)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction.delete(TABLE_NAME, keyset)

        transaction.commit()

        self.assertEqual(transaction.committed, commit_time)
        self.assertIsNone(session._transaction)

        session_id, mutations, txn_id, metadata = api._committed
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(
            metadata, [('google-cloud-resource-prefix', database.name)])
Пример #29
0
    def test_context_mgr_success(self):
        """A clean ``with batch:`` exit commits the staged mutations with a
        single-use read-write transaction and the resource-prefix metadata."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            TransactionOptions)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        commit_time = datetime.datetime.utcnow().replace(tzinfo=UTC)
        commit_response = CommitResponse(
            commit_timestamp=_datetime_to_pb_timestamp(commit_time))
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _commit_response=commit_response)
        batch = self._make_one(_Session(database))

        with batch:
            batch.insert(TABLE_NAME, COLUMNS, VALUES)

        self.assertEqual(batch.committed, commit_time)

        committed_session, mutations, single_use_txn, metadata = api._committed
        self.assertEqual(committed_session, self.SESSION_NAME)
        self.assertEqual(mutations, batch._mutations)
        self.assertIsInstance(single_use_txn, TransactionOptions)
        self.assertTrue(single_use_txn.HasField('read_write'))
        self.assertEqual(
            metadata, [('google-cloud-resource-prefix', database.name)])
Пример #30
0
    def test_context_mgr_success(self):
        """Checking out a batch should borrow a pooled session, yield a
        ``Batch`` bound to it, and return the session to the pool on exit."""
        import datetime
        from google.cloud.proto.spanner.v1.spanner_pb2 import CommitResponse
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            TransactionOptions)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner.batch import Batch

        commit_time = datetime.datetime.utcnow().replace(tzinfo=UTC)
        database = _Database(self.DATABASE_NAME)
        api = database.spanner_api = _FauxSpannerClient()
        api._commit_response = CommitResponse(
            commit_timestamp=_datetime_to_pb_timestamp(commit_time))
        pool = database._pool = _Pool()
        session = _Session(database)
        pool.put(session)
        checkout = self._make_one(database)

        with checkout as batch:
            # While checked out, the pool's slot is empty.
            self.assertIsNone(pool._session)
            self.assertIsInstance(batch, Batch)
            self.assertIs(batch._session, session)

        self.assertIs(pool._session, session)
        self.assertEqual(batch.committed, commit_time)

        (session_name, mutations, single_use_txn,
         options) = api._committed
        self.assertIs(session_name, self.SESSION_NAME)
        self.assertEqual(mutations, [])
        self.assertIsInstance(single_use_txn, TransactionOptions)
        self.assertTrue(single_use_txn.HasField('read_write'))
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
Пример #31
0
    def test_run_in_transaction_w_abort_w_retry_metadata(self):
        """After one ABORTED commit carrying RetryInfo, ``run_in_transaction``
        should sleep for the advertised delay and rerun the unit of work.
        """
        import datetime
        from google.cloud.proto.spanner.v1.spanner_pb2 import CommitResponse
        from google.cloud.proto.spanner.v1.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner.transaction import Transaction
        from google.cloud.spanner import session as MUT
        from google.cloud._testing import _Monkey

        TABLE_NAME = 'citizens'
        COLUMNS = ['email', 'first_name', 'last_name', 'age']
        VALUES = [
            ['*****@*****.**', 'Phred', 'Phlyntstone', 32],
            ['*****@*****.**', 'Bharney', 'Rhubble', 31],
        ]
        TRANSACTION_ID = b'FACEDACE'
        # Retry delay the faux API advertises in the abort's retry metadata.
        RETRY_SECONDS = 12
        RETRY_NANOS = 3456
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # First commit aborts once with retry metadata; the retry succeeds.
        gax_api = _SpannerApi(
            _begin_transaction_response=transaction_pb,
            _commit_abort_count=1,
            _commit_abort_retry_seconds=RETRY_SECONDS,
            _commit_abort_retry_nanos=RETRY_NANOS,
            _commit_response=response,
        )
        database = _Database(self.DATABASE_NAME)
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = 'DEADBEEF'

        called_with = []

        def unit_of_work(txn, *args, **kw):
            # Record each invocation so the retry can be verified below.
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)

        # Stub out the session module's ``time`` so the retry delay is
        # recorded in ``_slept`` instead of actually waited.
        time_module = _FauxTimeModule()

        with _Monkey(MUT, time=time_module):
            committed = session.run_in_transaction(
                unit_of_work, 'abc', some_arg='def')

        # Slept exactly the seconds + nanos advertised by the retry metadata.
        self.assertEqual(time_module._slept,
                         RETRY_SECONDS + RETRY_NANOS / 1.0e9)
        self.assertEqual(committed, now)
        # The unit of work ran twice: the aborted attempt, then the retry.
        self.assertEqual(len(called_with), 2)
        for index, (txn, args, kw) in enumerate(called_with):
            self.assertIsInstance(txn, Transaction)
            if index == 1:
                # Only the second (retried) transaction actually committed.
                self.assertEqual(txn.committed, committed)
            else:
                self.assertIsNone(txn.committed)
            self.assertEqual(args, ('abc',))
            self.assertEqual(kw, {'some_arg': 'def'})
Пример #32
0
    def test_context_mgr_success(self):
        """Exiting ``with transaction:`` cleanly should commit the staged
        mutations under the begun transaction id, with the resource-prefix
        metadata forwarded to the API."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB,
        )
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        transaction_pb = TransactionPB(id=self.TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Build the database double exactly once; the original created a
        # second, immediately-discarded _Database() before this point.
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _begin_transaction_response=transaction_pb, _commit_response=response
        )
        session = _Session(database)
        transaction = self._make_one(session)

        with transaction:
            transaction.insert(TABLE_NAME, COLUMNS, VALUES)

        self.assertEqual(transaction.committed, now)

        session_id, mutations, txn_id, metadata = api._committed
        self.assertEqual(session_id, self.SESSION_NAME)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(metadata, [("google-cloud-resource-prefix", database.name)])
Пример #33
0
    def test_create_instance_not_found(self):
        """CreateBackup raising ``NotFound`` should propagate from ``create``
        after a single well-formed request."""
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.types import table
        from google.cloud.exceptions import NotFound

        client = _Client()
        api = client.table_admin_client = self._make_table_admin_client()
        api.create_backup.side_effect = NotFound("testing")

        expire_time = self._make_timestamp()
        backup = self._make_one(
            self.BACKUP_ID,
            _Instance(self.INSTANCE_NAME, client=client),
            table_id=self.TABLE_ID,
            expire_time=expire_time,
        )

        with self.assertRaises(NotFound):
            backup.create(self.CLUSTER_ID)

        expected_backup_pb = table.Backup(
            source_table=self.TABLE_NAME,
            expire_time=_datetime_to_pb_timestamp(expire_time),
        )
        api.create_backup.assert_called_once_with(
            request={
                "parent": self.CLUSTER_NAME,
                "backup_id": self.BACKUP_ID,
                "backup": expected_backup_pb,
            }
        )
Пример #34
0
    def test_commit_ok(self):
        """``commit`` should record the commit time, clear the session's
        transaction, and pass the resource-prefix metadata option."""
        import datetime
        from google.cloud.proto.spanner.v1.spanner_pb2 import CommitResponse
        from google.cloud.spanner.keyset import KeySet
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        commit_time = datetime.datetime.utcnow().replace(tzinfo=UTC)
        commit_response = CommitResponse(
            commit_timestamp=_datetime_to_pb_timestamp(commit_time))
        keyset = KeySet(keys=[[0], [1], [2]])
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _commit_response=commit_response)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction.delete(TABLE_NAME, keyset)

        transaction.commit()

        self.assertEqual(transaction.committed, commit_time)
        self.assertIsNone(session._transaction)

        session_id, mutations, txn_id, options = api._committed
        self.assertEqual(session_id, session.name)
        self.assertEqual(txn_id, self.TRANSACTION_ID)
        self.assertEqual(mutations, transaction._mutations)
        self.assertEqual(options.kwargs['metadata'],
                         [('google-cloud-resource-prefix', database.name)])
Пример #35
0
    def test_context_mgr_failure(self):
        """Raising inside ``with batch:`` must skip the commit entirely while
        leaving the staged mutation in place."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        commit_time = datetime.datetime.utcnow().replace(tzinfo=UTC)
        commit_response = CommitResponse(
            commit_timestamp=_datetime_to_pb_timestamp(commit_time))
        database = _Database()
        api = database.spanner_api = _FauxSpannerAPI(
            _commit_response=commit_response)
        batch = self._make_one(_Session(database))

        class _BailOut(Exception):
            pass

        with self.assertRaises(_BailOut):
            with batch:
                batch.insert(TABLE_NAME, COLUMNS, VALUES)
                raise _BailOut()

        self.assertEqual(batch.committed, None)
        self.assertEqual(api._committed, None)
        self.assertEqual(len(batch._mutations), 1)
Пример #36
0
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type.  This function simply determines the
    proper attribute name based on the type of the value provided and
    returns the attribute name as well as a properly formatted value.

    Certain value types need to be coerced into a different type (such
    as a `datetime.datetime` into an integer timestamp, or a
    `google.cloud.datastore.key.Key` into a Protobuf representation.  This
    function handles that for you.

    .. note::
       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
       to 'string_value' in the datastore;  values which are "bytes"
       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: `datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
               bool, float, integer, string
    :param val: The value to be scrutinized.

    :rtype: tuple
    :returns: A tuple of the attribute name and proper value type.
    """

    # NOTE: the isinstance chain below is order-sensitive.  ``bool`` must be
    # tested before the integer branch (bool is a subclass of int), and text
    # must be tested before the bytes branch (on Python 2, ``str`` is a bytes
    # type and must map to 'blob_value' per the note above).
    if isinstance(val, datetime.datetime):
        name = 'timestamp'
        value = _datetime_to_pb_timestamp(val)
    elif isinstance(val, Key):
        name, value = 'key', val.to_protobuf()
    elif isinstance(val, bool):
        name, value = 'boolean', val
    elif isinstance(val, float):
        name, value = 'double', val
    elif isinstance(val, six.integer_types):
        name, value = 'integer', val
    elif isinstance(val, six.text_type):
        name, value = 'string', val
    elif isinstance(val, (bytes, str)):
        name, value = 'blob', val
    elif isinstance(val, Entity):
        name, value = 'entity', val
    elif isinstance(val, list):
        name, value = 'array', val
    elif isinstance(val, GeoPoint):
        name, value = 'geo_point', val.to_protobuf()
    elif val is None:
        name, value = 'null', struct_pb2.NULL_VALUE
    else:
        raise ValueError("Unknown protobuf attr type %s" % type(val))

    # The datastore Value message names its fields '<type>_value'.
    return name + '_value', value
Пример #37
0
def _instance_api_response_for_create():
    """Build a mocked instance-admin API wired for a create-instance call.

    Returns an ``(api, response)`` pair where ``api.create_instance``
    returns ``response``, a real ``operation.Operation`` wrapping a faux
    long-running-operation proto carrying ``CreateInstanceMetadata``.
    """
    import datetime
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.types import (
        bigtable_instance_admin as messages_v2_pb2, )
    from google.cloud.bigtable_admin_v2.types import instance

    NOW = datetime.datetime.utcnow()
    NOW_PB = _datetime_to_pb_timestamp(NOW)
    metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
    # Pack the metadata into an ``Any``, as a real LRO response would;
    # the type URL comes from the underlying raw-pb descriptor.
    type_url = "type.googleapis.com/{}".format(
        messages_v2_pb2.CreateInstanceMetadata._meta._pb.DESCRIPTOR.full_name)
    response_pb = operations_pb2.Operation(
        name=OP_NAME,
        metadata=Any(type_url=type_url,
                     value=metadata._pb.SerializeToString()),
    )
    response = operation.from_gapic(
        response_pb,
        mock.Mock(),
        instance.Instance,
        metadata_type=messages_v2_pb2.CreateInstanceMetadata,
    )
    project_path_template = "projects/{}"
    location_path_template = "projects/{}/locations/{}"
    api = _make_instance_admin_api()
    api.create_instance.return_value = response
    # Path helpers delegate to str.format so tests get realistic names.
    api.project_path = project_path_template.format
    api.location_path = location_path_template.format
    api.common_location_path = location_path_template.format
    return api, response
Пример #38
0
    def _instance_api_response_for_update(self):
        """Helper: mocked instance-admin API wired for a partial update.

        Returns ``(instance_api, response)`` where
        ``instance_api.partial_update_instance`` returns ``response``, a real
        ``operation.Operation`` over a faux long-running-operation proto
        carrying ``UpdateInstanceMetadata``.
        """
        import datetime
        from google.api_core import operation
        from google.longrunning import operations_pb2
        from google.protobuf.any_pb2 import Any
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
        from google.cloud.bigtable_admin_v2.proto import (
            bigtable_instance_admin_pb2 as messages_v2_pb2, )
        from google.cloud.bigtable_admin_v2.types import instance_pb2

        NOW = datetime.datetime.utcnow()
        NOW_PB = _datetime_to_pb_timestamp(NOW)
        metadata = messages_v2_pb2.UpdateInstanceMetadata(request_time=NOW_PB)
        # Pack the metadata into an ``Any``, as a real LRO response would.
        type_url = "type.googleapis.com/{}".format(
            messages_v2_pb2.UpdateInstanceMetadata.DESCRIPTOR.full_name)
        response_pb = operations_pb2.Operation(
            name=self.OP_NAME,
            metadata=Any(type_url=type_url,
                         value=metadata.SerializeToString()),
        )
        response = operation.from_gapic(
            response_pb,
            mock.Mock(),
            instance_pb2.Instance,
            metadata_type=messages_v2_pb2.UpdateInstanceMetadata,
        )
        instance_path_template = "projects/{project}/instances/{instance}"
        instance_api = mock.create_autospec(
            bigtable_instance_admin_client.BigtableInstanceAdminClient)
        instance_api.partial_update_instance.return_value = response
        # Path helper delegates to str.format so tests get realistic names.
        instance_api.instance_path = instance_path_template.format
        return instance_api, response
Пример #39
0
    def test_subscription_pull_explicit(self):
        """``subscription_pull`` with explicit arguments should forward them
        to the GAX API and translate the proto messages into REST-style
        dictionaries.
        """
        import base64
        import datetime
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import _datetime_to_rfc3339

        NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
        NOW_PB = _datetime_to_pb_timestamp(NOW)
        NOW_RFC3339 = _datetime_to_rfc3339(NOW)
        PAYLOAD = b"This is the message text"
        # Message payloads cross the wire base64-encoded.
        B64 = base64.b64encode(PAYLOAD).decode("ascii")
        ACK_ID = "DEADBEEF"
        MSG_ID = "BEADCAFE"
        # Expected REST-style rendering of the proto message built below.
        MESSAGE = {"messageId": MSG_ID, "data": B64, "attributes": {"a": "b"}, "publishTime": NOW_RFC3339}
        RECEIVED = [{"ackId": ACK_ID, "message": MESSAGE}]
        message_pb = _PubsubMessagePB(MSG_ID, B64, {"a": "b"}, NOW_PB)
        response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)])
        gax_api = _GAXSubscriberAPI(_pull_response=response_pb)
        client = _Client(self.PROJECT)
        api = self._make_one(gax_api, client)
        MAX_MESSAGES = 10

        received = api.subscription_pull(self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES)

        self.assertEqual(received, RECEIVED)
        # Verify the positional arguments recorded by the faux GAX API.
        sub_path, max_messages, return_immediately, options = gax_api._pull_called_with
        self.assertEqual(sub_path, self.SUB_PATH)
        self.assertEqual(max_messages, MAX_MESSAGES)
        self.assertTrue(return_immediately)
        self.assertIsNone(options)
    def create(self):
        """Create this backup within its instance.

        :rtype: :class:`~google.api_core.operation.Operation`
        :returns: a future used to poll the status of the create request
        :raises ValueError: if ``expire_time`` or ``database`` is not set
            locally (checked before any API call is made)
        :raises Conflict: if the backup already exists
        :raises NotFound: if the instance owning the backup does not exist
        :raises BadRequest: if the database or expire_time values are invalid
                            or expire_time is not set
        """
        # Validate required fields locally before issuing the RPC.
        if not self._expire_time:
            raise ValueError("expire_time not set")
        if not self._database:
            raise ValueError("database not set")
        api = self._instance._client.database_admin_api
        metadata = _metadata_with_prefix(self.name)
        backup = {
            "database": self._database,
            "expire_time": _datetime_to_pb_timestamp(self.expire_time),
        }

        # create_backup is a long-running operation; the caller polls or
        # awaits the returned future.
        future = api.create_backup(self._instance.name,
                                   self.backup_id,
                                   backup,
                                   metadata=metadata)
        return future
Пример #41
0
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type.  This helper picks the proper attribute
    name for the type of ``val`` and coerces ``val`` where needed (e.g. a
    `datetime.datetime` into a protobuf timestamp, or a
    `google.cloud.datastore.key.Key` into its protobuf representation).

    .. note::
       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
       to 'string_value' in the datastore;  values which are "bytes"
       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: `datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
               bool, float, integer, string
    :param val: The value to be scrutinized.

    :rtype: tuple
    :returns: A tuple of the attribute name and proper value type.
    """
    # NOTE: the order of the isinstance checks matters -- ``bool`` is a
    # subclass of ``int`` and text must be matched before the bytes branch,
    # so the most specific types come first.
    if isinstance(val, datetime.datetime):
        attr, coerced = 'timestamp', _datetime_to_pb_timestamp(val)
    elif isinstance(val, Key):
        attr, coerced = 'key', val.to_protobuf()
    elif isinstance(val, bool):
        attr, coerced = 'boolean', val
    elif isinstance(val, float):
        attr, coerced = 'double', val
    elif isinstance(val, six.integer_types):
        attr, coerced = 'integer', val
    elif isinstance(val, six.text_type):
        attr, coerced = 'string', val
    elif isinstance(val, (bytes, str)):
        # Python 2 ``str`` (and ``bytes`` on both versions) become blobs.
        attr, coerced = 'blob', val
    elif isinstance(val, Entity):
        attr, coerced = 'entity', val
    elif isinstance(val, list):
        attr, coerced = 'array', val
    elif isinstance(val, GeoPoint):
        attr, coerced = 'geo_point', val.to_protobuf()
    elif val is None:
        attr, coerced = 'null', struct_pb2.NULL_VALUE
    else:
        raise ValueError("Unknown protobuf attr type %s" % type(val))

    return attr + '_value', coerced
Пример #42
0
    def test_update_expire_time_success(self):
        """``update_expire_time`` should issue exactly one UpdateBackup RPC
        whose field mask is limited to ``expire_time``."""
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.types import table
        from google.protobuf import field_mask_pb2

        client = _Client()
        api = client._table_admin_client = self._make_table_admin_client()
        # NOTE(review): ``return_type`` looks like a typo for the mock's
        # ``return_value`` -- harmless here because the RPC result is never
        # inspected, but worth confirming.
        api.update_backup.return_type = table.Backup(name=self.BACKUP_NAME)
        instance = _Instance(self.INSTANCE_NAME, client=client)
        backup = self._make_one(self.BACKUP_ID,
                                instance,
                                cluster_id=self.CLUSTER_ID)
        expire_time = self._make_timestamp()

        backup.update_expire_time(expire_time)

        # The request must carry only the new expire_time, scoped by a
        # field mask so no other backup fields are overwritten.
        backup_update = table.Backup(
            name=self.BACKUP_NAME,
            expire_time=_datetime_to_pb_timestamp(expire_time),
        )
        update_mask = field_mask_pb2.FieldMask(paths=["expire_time"])
        api.update_backup.assert_called_once_with(request={
            "backup": backup_update,
            "update_mask": update_mask
        })
Пример #43
0
    def test_update_expire_time_not_found(self):
        """A ``NotFound`` raised by the UpdateBackup RPC propagates to the
        caller, and the request is still sent exactly once."""
        from google.api_core.exceptions import NotFound
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.types import table
        from google.protobuf import field_mask_pb2

        client = _Client()
        api = client._table_admin_client = self._make_table_admin_client()
        # Simulate the backend rejecting the update for a missing backup.
        api.update_backup.side_effect = NotFound("testing")
        instance = _Instance(self.INSTANCE_NAME, client=client)
        backup = self._make_one(self.BACKUP_ID,
                                instance,
                                cluster_id=self.CLUSTER_ID)
        expire_time = self._make_timestamp()

        with self.assertRaises(NotFound):
            backup.update_expire_time(expire_time)

        # Even though the call failed, the expected request (masked to
        # ``expire_time``) must have been issued exactly once.
        backup_update = table.Backup(
            name=self.BACKUP_NAME,
            expire_time=_datetime_to_pb_timestamp(expire_time),
        )
        update_mask = field_mask_pb2.FieldMask(paths=["expire_time"])
        api.update_backup.assert_called_once_with(request={
            "backup": backup_update,
            "update_mask": update_mask
        })
Пример #44
0
    def test_create_grpc_error(self):
        """An ``Unknown`` gRPC error from CreateBackup surfaces to the
        caller as a ``GoogleAPICallError`` after the request was issued."""
        from google.api_core.exceptions import GoogleAPICallError
        from google.api_core.exceptions import Unknown
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.types import table

        client = _Client()
        api = client._table_admin_client = self._make_table_admin_client()
        # Simulate a transport-level failure on the create RPC.
        api.create_backup.side_effect = Unknown("testing")

        timestamp = self._make_timestamp()
        backup = self._make_one(
            self.BACKUP_ID,
            _Instance(self.INSTANCE_NAME, client=client),
            table_id=self.TABLE_ID,
            expire_time=timestamp,
        )

        backup_pb = table.Backup(
            source_table=self.TABLE_NAME,
            expire_time=_datetime_to_pb_timestamp(timestamp),
        )

        # ``Unknown`` subclasses ``GoogleAPICallError``, so this catches it.
        with self.assertRaises(GoogleAPICallError):
            backup.create(self.CLUSTER_ID)

        api.create_backup.assert_called_once_with(
            request={
                "parent": self.CLUSTER_NAME,
                "backup_id": self.BACKUP_ID,
                "backup": backup_pb,
            })
Пример #45
0
    def create(self, cluster_id=None):
        """Creates this backup within its instance.

        :type cluster_id: str
        :param cluster_id: (Optional) The ID of the Cluster for the newly
                           created Backup.

        :rtype: :class:`~google.api_core.operation.Operation`
        :returns: :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture`
                  instance, to be used to poll the status of the 'create' request
        :raises Conflict: if the Backup already exists
        :raises NotFound: if the Instance owning the Backup does not exist
        :raises BadRequest: if the `table` or `expire_time` values are invalid,
                            or `expire_time` is not set
        """
        # TODO: Consider implementing a method that sets a default value of
        #  `expire_time`, e.g. 1 week from the creation of the Backup.
        if not self._expire_time:
            raise ValueError('"expire_time" parameter must be set')
        if not self.table_id:
            raise ValueError('"table" parameter must be set')

        # An explicit cluster_id overrides whatever was previously stored.
        if cluster_id:
            self._cluster = cluster_id
        if not self._cluster:
            raise ValueError('"cluster" parameter must be set')

        backup_pb = table_pb2.Backup(
            source_table=self.source_table,
            expire_time=_datetime_to_pb_timestamp(self.expire_time),
        )
        table_api = self._instance._client.table_admin_client
        return table_api.create_backup(self.parent, self.backup_id, backup_pb)
Пример #46
0
    def test_run_in_transaction_w_abort_w_retry_metadata(self):
        """When a commit aborts with retry metadata, the session sleeps for
        the suggested delay and re-runs the unit of work once more."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner_v1.transaction import Transaction
        from google.cloud.spanner_v1 import session as MUT
        from google.cloud._testing import _Monkey

        TABLE_NAME = 'citizens'
        COLUMNS = ['email', 'first_name', 'last_name', 'age']
        VALUES = [
            ['*****@*****.**', 'Phred', 'Phlyntstone', 32],
            ['*****@*****.**', 'Bharney', 'Rhubble', 31],
        ]
        TRANSACTION_ID = b'FACEDACE'
        RETRY_SECONDS = 12
        RETRY_NANOS = 3456
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Fake API: first commit aborts (with retry delay metadata), the
        # retried commit succeeds.
        gax_api = _SpannerApi(
            _begin_transaction_response=transaction_pb,
            _commit_abort_count=1,
            _commit_abort_retry_seconds=RETRY_SECONDS,
            _commit_abort_retry_nanos=RETRY_NANOS,
            _commit_response=response,
        )
        database = _Database(self.DATABASE_NAME)
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = 'DEADBEEF'

        called_with = []

        def unit_of_work(txn, *args, **kw):
            # Record each invocation so we can assert the retry happened.
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)

        # Patch ``time`` so the retry sleep is captured rather than slept.
        time_module = _FauxTimeModule()

        with _Monkey(MUT, time=time_module):
            session.run_in_transaction(unit_of_work, 'abc', some_arg='def')

        # The sleep must equal the server-suggested seconds + nanos delay.
        self.assertEqual(time_module._slept,
                         RETRY_SECONDS + RETRY_NANOS / 1.0e9)
        self.assertEqual(len(called_with), 2)
        for index, (txn, args, kw) in enumerate(called_with):
            self.assertIsInstance(txn, Transaction)
            # Only the second (retried) transaction actually commits.
            if index == 1:
                self.assertEqual(txn.committed, now)
            else:
                self.assertIsNone(txn.committed)
            self.assertEqual(args, ('abc',))
            self.assertEqual(kw, {'some_arg': 'def'})
Пример #47
0
    def test_list_entries_no_paging(self):
        """``list_entries`` without paging options forwards filter/order to
        the GAX API and converts the returned protobuf to a TextEntry."""
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.gax import INITIAL_PAGE
        from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry

        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import UTC
        from google.cloud._testing import _GAXPageIterator
        from google.cloud.logging import DESCENDING
        from google.cloud.logging.client import Client
        from google.cloud.logging.entries import TextEntry
        from google.cloud.logging.logger import Logger

        TOKEN = 'TOKEN'
        TEXT = 'TEXT'
        resource_pb = MonitoredResource(type='global')
        timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
        timestamp_pb = _datetime_to_pb_timestamp(timestamp)
        entry_pb = LogEntry(log_name=self.LOG_PATH,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            text_payload=TEXT)
        response = _GAXPageIterator([entry_pb], page_token=TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        client = Client(project=self.PROJECT, credentials=_make_credentials(),
                        use_gax=True)
        api = self._make_one(gax_api, client)

        iterator = api.list_entries(
            [self.PROJECT], self.FILTER, DESCENDING)
        entries = list(iterator)
        next_token = iterator.next_page_token

        # First check the token.
        self.assertEqual(next_token, TOKEN)
        # Then check the entries returned.
        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, TextEntry)
        self.assertEqual(entry.payload, TEXT)
        self.assertIsInstance(entry.logger, Logger)
        self.assertEqual(entry.logger.name, self.LOG_NAME)
        self.assertIsNone(entry.insert_id)
        self.assertEqual(entry.timestamp, timestamp)
        self.assertIsNone(entry.labels)
        self.assertIsNone(entry.severity)
        self.assertIsNone(entry.http_request)

        # Finally verify the arguments forwarded to the GAX layer.
        resource_names, projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(resource_names, [])
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, self.FILTER)
        self.assertEqual(order_by, DESCENDING)
        self.assertEqual(page_size, 0)
        self.assertIs(options.page_token, INITIAL_PAGE)
    def test_update(self):
        """``Cluster.update`` should send an UpdateCluster request matching
        the cluster's name/serve_nodes and return a long-running Operation."""
        import datetime
        from google.api_core import operation
        from google.longrunning import operations_pb2
        from google.protobuf.any_pb2 import Any
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.bigtable_admin_v2.proto import (
            bigtable_instance_admin_pb2 as messages_v2_pb2,
        )
        from google.cloud.bigtable_admin_v2.types import instance_pb2
        from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
        from google.cloud.bigtable.enums import StorageType

        NOW = datetime.datetime.utcnow()
        NOW_PB = _datetime_to_pb_timestamp(NOW)

        credentials = _make_credentials()
        client = self._make_client(
            project=self.PROJECT, credentials=credentials, admin=True
        )
        STORAGE_TYPE_SSD = StorageType.SSD
        instance = _Instance(self.INSTANCE_ID, client)
        cluster = self._make_one(
            self.CLUSTER_ID,
            instance,
            location_id=self.LOCATION_ID,
            serve_nodes=self.SERVE_NODES,
            default_storage_type=STORAGE_TYPE_SSD,
        )
        # Create expected_request
        expected_request = instance_pb2.Cluster(
            name=cluster.name, serve_nodes=self.SERVE_NODES
        )

        # Build the Operation response carrying UpdateClusterMetadata.
        metadata = messages_v2_pb2.UpdateClusterMetadata(request_time=NOW_PB)
        type_url = "type.googleapis.com/{}".format(
            messages_v2_pb2.UpdateClusterMetadata.DESCRIPTOR.full_name
        )
        response_pb = operations_pb2.Operation(
            name=self.OP_NAME,
            metadata=Any(type_url=type_url, value=metadata.SerializeToString()),
        )

        # Patch the stub used by the API method.
        channel = ChannelStub(responses=[response_pb])
        api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
            channel=channel
        )
        client._instance_admin_client = api

        # Perform the method and check the result.
        result = cluster.update()
        actual_request = channel.requests[0][1]

        self.assertEqual(actual_request, expected_request)
        self.assertIsInstance(result, operation.Operation)
        self.assertEqual(result.operation.name, self.OP_NAME)
        self.assertIsInstance(result.metadata, messages_v2_pb2.UpdateClusterMetadata)
Пример #49
0
    def test_create(self):
        """``Instance.create`` issues one CreateInstance RPC and wraps the
        returned long-running operation (with its metadata) in Operation."""
        import datetime
        from google.longrunning import operations_pb2
        from google.protobuf.any_pb2 import Any
        from google.cloud.bigtable._generated import (
            bigtable_instance_admin_pb2 as messages_v2_pb2)
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from unit_tests._testing import _FakeStub
        from google.cloud.operation import Operation
        from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES

        NOW = datetime.datetime.utcnow()
        NOW_PB = _datetime_to_pb_timestamp(NOW)
        client = _Client(self.PROJECT)
        instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID,
                                  display_name=self.DISPLAY_NAME)

        # Create response_pb
        metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
        type_url = 'type.googleapis.com/%s' % (
            messages_v2_pb2.CreateInstanceMetadata.DESCRIPTOR.full_name,)
        response_pb = operations_pb2.Operation(
            name=self.OP_NAME,
            metadata=Any(
                type_url=type_url,
                value=metadata.SerializeToString(),
                )
            )

        # Patch the stub used by the API method.
        client._instance_stub = stub = _FakeStub(response_pb)

        # Perform the method and check the result.
        result = instance.create()

        self.assertIsInstance(result, Operation)
        self.assertEqual(result.name, self.OP_NAME)
        self.assertIs(result.target, instance)
        self.assertIs(result.client, client)
        self.assertIsInstance(result.metadata,
                              messages_v2_pb2.CreateInstanceMetadata)
        self.assertEqual(result.metadata.request_time, NOW_PB)
        self.assertEqual(result.caller_metadata,
                         {'request_type': 'CreateInstance'})

        # Verify the single RPC the stub saw and its request contents.
        self.assertEqual(len(stub.method_calls), 1)
        api_name, args, kwargs = stub.method_calls[0]
        self.assertEqual(api_name, 'CreateInstance')
        request_pb, = args
        self.assertIsInstance(request_pb,
                              messages_v2_pb2.CreateInstanceRequest)
        self.assertEqual(request_pb.parent, 'projects/%s' % (self.PROJECT,))
        self.assertEqual(request_pb.instance_id, self.INSTANCE_ID)
        self.assertEqual(request_pb.instance.display_name, self.DISPLAY_NAME)
        # The default cluster keyed by the instance ID gets default nodes.
        cluster = request_pb.clusters[self.INSTANCE_ID]
        self.assertEqual(cluster.serve_nodes, DEFAULT_SERVE_NODES)
        self.assertEqual(kwargs, {})
def _doc_get_info(ref_string, values):
    """Build a Firestore ``Document`` protobuf plus a matching read time.

    The create/update times are placed at fixed 100-second offsets before
    "now" so the three timestamps are distinct and strictly ordered.
    """
    from google.cloud.firestore_v1.proto import document_pb2
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.firestore_v1 import _helpers

    current = datetime.datetime.utcnow()
    step = datetime.timedelta(seconds=100)
    read_time = _datetime_to_pb_timestamp(current)
    update_time = _datetime_to_pb_timestamp(current - step)
    create_time = _datetime_to_pb_timestamp(current - 2 * step)

    doc_pb = document_pb2.Document(
        name=ref_string,
        fields=_helpers.encode_dict(values),
        create_time=create_time,
        update_time=update_time,
    )
    return doc_pb, read_time
Пример #51
0
    def _list_entries_with_paging_helper(self, payload, struct_pb):
        """Shared driver: list entries with explicit page size/token and
        verify a StructEntry round-trips plus paging args reach GAX.

        ``payload`` is the expected decoded JSON payload; ``struct_pb`` is
        its protobuf Struct form placed on the wire entry.
        """
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import UTC
        from google.cloud._testing import _GAXPageIterator
        from google.cloud.logging.client import Client
        from google.cloud.logging.entries import StructEntry
        from google.cloud.logging.logger import Logger

        SIZE = 23
        TOKEN = 'TOKEN'
        NEW_TOKEN = 'NEW_TOKEN'
        resource_pb = MonitoredResource(type='global')
        timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
        timestamp_pb = _datetime_to_pb_timestamp(timestamp)
        entry_pb = LogEntry(log_name=self.LOG_PATH,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            json_payload=struct_pb)
        response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        client = Client(project=self.PROJECT, credentials=_make_credentials(),
                        use_gax=True)
        api = self._make_one(gax_api, client)

        iterator = api.list_entries(
            [self.PROJECT], page_size=SIZE, page_token=TOKEN)
        entries = list(iterator)
        next_token = iterator.next_page_token

        # First check the token.
        self.assertEqual(next_token, NEW_TOKEN)
        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, StructEntry)
        self.assertEqual(entry.payload, payload)
        self.assertIsInstance(entry.logger, Logger)
        self.assertEqual(entry.logger.name, self.LOG_NAME)
        self.assertIsNone(entry.insert_id)
        self.assertEqual(entry.timestamp, timestamp)
        self.assertIsNone(entry.labels)
        self.assertIsNone(entry.severity)
        self.assertIsNone(entry.http_request)

        # Verify the paging arguments forwarded to the GAX layer.
        resource_names, projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(resource_names, [])
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, '')
        self.assertEqual(order_by, '')
        self.assertEqual(page_size, SIZE)
        self.assertEqual(options.page_token, TOKEN)
Пример #52
0
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Translates the JSON-API style keys of ``mapping`` into the
    corresponding protobuf attributes on a freshly built ``LogEntry``.
    """
    # pylint: disable=too-many-branches
    entry_pb = LogEntry()

    # Simple string fields copy straight across under renamed attributes.
    scalar_key_map = {
        'logName': 'log_name',
        'insertId': 'insert_id',
        'textPayload': 'text_payload',
    }
    for json_key, attr_name in scalar_key_map.items():
        if json_key in mapping:
            setattr(entry_pb, attr_name, mapping[json_key])

    if 'resource' in mapping:
        entry_pb.resource.type = mapping['resource']['type']

    if 'severity' in mapping:
        severity = mapping['severity']
        if isinstance(severity, str):
            # Accept the symbolic name as well as the raw enum value.
            severity = LogSeverity.Value(severity)
        entry_pb.severity = severity

    if 'timestamp' in mapping:
        # Message-typed fields cannot be assigned; CopyFrom is required.
        entry_pb.timestamp.CopyFrom(
            _datetime_to_pb_timestamp(mapping['timestamp']))

    if 'labels' in mapping:
        for label_key, label_value in mapping['labels'].items():
            entry_pb.labels[label_key] = label_value

    if 'jsonPayload' in mapping:
        for payload_key, payload_value in mapping['jsonPayload'].items():
            entry_pb.json_payload[payload_key] = payload_value

    if 'protoPayload' in mapping:
        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)

    if 'httpRequest' in mapping:
        _http_request_mapping_to_pb(
            mapping['httpRequest'], entry_pb.http_request)

    if 'operation' in mapping:
        _log_operation_mapping_to_pb(
            mapping['operation'], entry_pb.operation)

    return entry_pb
Пример #53
0
    def test_run_in_transaction_w_abort_w_retry_metadata_deadline(self):
        """If the retry delay would exceed ``timeout_secs``, the session
        re-raises ``Aborted`` without sleeping or retrying."""
        import datetime
        from google.api_core.exceptions import Aborted
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner_v1 import session as MUT
        from google.cloud._testing import _Monkey

        TABLE_NAME = 'citizens'
        COLUMNS = ['email', 'first_name', 'last_name', 'age']
        VALUES = [
            ['*****@*****.**', 'Phred', 'Phlyntstone', 32],
            ['*****@*****.**', 'Bharney', 'Rhubble', 31],
        ]
        TRANSACTION_ID = b'FACEDACE'
        RETRY_SECONDS = 1
        RETRY_NANOS = 3456
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Fake API: commit aborts once, carrying retry-delay metadata.
        gax_api = _SpannerApi(
            _begin_transaction_response=transaction_pb,
            _commit_abort_count=1,
            _commit_abort_retry_seconds=RETRY_SECONDS,
            _commit_abort_retry_nanos=RETRY_NANOS,
            _commit_response=response,
        )
        database = _Database(self.DATABASE_NAME)
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = 'DEADBEEF'

        called_with = []

        def unit_of_work(txn, *args, **kw):
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)

        # Fake clock: 0.5s elapses during the attempt, so the retry delay
        # would blow past the 1-second deadline.
        time_module = _FauxTimeModule()
        time_module._times = [1, 1.5]

        with _Monkey(MUT, time=time_module):
            with self.assertRaises(Aborted):
                session.run_in_transaction(
                    unit_of_work, 'abc', timeout_secs=1)

        # No sleep and only a single attempt: deadline was honored.
        self.assertIsNone(time_module._slept)
        self.assertEqual(len(called_with), 1)
Пример #54
0
    def test_run_in_transaction_w_abort_no_retry_metadata(self):
        """An aborted commit without retry metadata still retries once
        (with no delay), and the unit of work's return value is surfaced."""
        import datetime
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB)
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud.spanner_v1.transaction import Transaction

        TABLE_NAME = 'citizens'
        COLUMNS = ['email', 'first_name', 'last_name', 'age']
        VALUES = [
            ['*****@*****.**', 'Phred', 'Phlyntstone', 32],
            ['*****@*****.**', 'Bharney', 'Rhubble', 31],
        ]
        TRANSACTION_ID = b'FACEDACE'
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # Fake API: one abort (no retry metadata), then a successful commit.
        gax_api = _SpannerApi(
            _begin_transaction_response=transaction_pb,
            _commit_abort_count=1,
            _commit_response=response,
        )
        database = _Database(self.DATABASE_NAME)
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = 'DEADBEEF'

        called_with = []

        def unit_of_work(txn, *args, **kw):
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)
            return 'answer'

        return_value = session.run_in_transaction(
            unit_of_work, 'abc', some_arg='def')

        # Two attempts: the aborted one plus the successful retry.
        self.assertEqual(len(called_with), 2)
        for index, (txn, args, kw) in enumerate(called_with):
            self.assertIsInstance(txn, Transaction)
            self.assertEqual(return_value, 'answer')
            self.assertEqual(args, ('abc',))
            self.assertEqual(kw, {'some_arg': 'def'})
Пример #55
0
    def _make_log_entry_with_extras(self, labels, iid, type_url, now):
        """Build a fully-populated ``LogEntry`` protobuf for tests.

        Fills in every optional field -- resource, proto payload, HTTP
        request, operation, severity, insert id, and labels -- so callers
        can exercise the full entry-parsing path.
        """
        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
        from google.cloud.grpc.logging.v2.log_entry_pb2 import (
            LogEntryOperation)
        from google.logging.type.http_request_pb2 import HttpRequest
        from google.logging.type.log_severity_pb2 import WARNING
        from google.protobuf.any_pb2 import Any

        from google.cloud._helpers import _datetime_to_pb_timestamp

        timestamp_pb = _datetime_to_pb_timestamp(now)
        resource_pb = MonitoredResource(type='global', labels=labels)
        payload_pb = Any(type_url=type_url)
        http_pb = HttpRequest(
            request_url='http://example.com/requested',
            request_method='GET',
            status=200,
            referer='http://example.com/referer',
            user_agent='AGENT',
            cache_hit=True,
            request_size=256,
            response_size=1024,
            remote_ip='1.2.3.4',
        )
        op_pb = LogEntryOperation(
            producer='PRODUCER',
            first=True,
            last=True,
            id='OPID',
        )
        return LogEntry(
            log_name=self.LOG_PATH,
            resource=resource_pb,
            proto_payload=payload_pb,
            timestamp=timestamp_pb,
            severity=WARNING,
            insert_id=iid,
            http_request=http_pb,
            labels=labels,
            operation=op_pb,
        )
Пример #56
0
    def test_list_entries_no_paging(self):
        """Older-surface variant: list_entries returns (entries, token)
        dicts and forwards filter/order to the GAX API unchanged."""
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.gax import INITIAL_PAGE
        from google.logging.v2.log_entry_pb2 import LogEntry

        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._testing import _GAXPageIterator
        from google.cloud.logging import DESCENDING

        TOKEN = 'TOKEN'
        TEXT = 'TEXT'
        resource_pb = MonitoredResource(type='global')
        timestamp_pb = _datetime_to_pb_timestamp(
            datetime.datetime.utcnow())
        entry_pb = LogEntry(log_name=self.LOG_NAME,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            text_payload=TEXT)
        response = _GAXPageIterator([entry_pb], page_token=TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        api = self._makeOne(gax_api)

        entries, next_token = api.list_entries(
            [self.PROJECT], self.FILTER, DESCENDING)

        # Entries are plain dicts keyed in JSON-API style on this surface.
        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, dict)
        self.assertEqual(entry['logName'], self.LOG_NAME)
        self.assertEqual(entry['resource'], {'type': 'global'})
        self.assertEqual(entry['textPayload'], TEXT)
        self.assertEqual(next_token, TOKEN)

        # Verify the arguments forwarded to the GAX layer.
        projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, self.FILTER)
        self.assertEqual(order_by, DESCENDING)
        self.assertEqual(page_size, 0)
        self.assertIs(options.page_token, INITIAL_PAGE)
Пример #57
0
    def _list_entries_with_paging_helper(self, payload, struct_pb):
        """Shared driver (older surface): list entries with explicit page
        size/token and verify the dict-shaped entry plus forwarded args.

        ``payload`` is the expected decoded JSON payload; ``struct_pb`` is
        its protobuf Struct form placed on the wire entry.
        """
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.logging.v2.log_entry_pb2 import LogEntry
        from google.cloud._testing import _GAXPageIterator
        from google.cloud._helpers import _datetime_to_pb_timestamp

        SIZE = 23
        TOKEN = 'TOKEN'
        NEW_TOKEN = 'NEW_TOKEN'
        resource_pb = MonitoredResource(type='global')
        timestamp_pb = _datetime_to_pb_timestamp(
            datetime.datetime.utcnow())
        entry_pb = LogEntry(log_name=self.LOG_NAME,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            json_payload=struct_pb)
        response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        api = self._makeOne(gax_api)

        entries, next_token = api.list_entries(
            [self.PROJECT], page_size=SIZE, page_token=TOKEN)

        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, dict)
        self.assertEqual(entry['logName'], self.LOG_NAME)
        self.assertEqual(entry['resource'], {'type': 'global'})
        self.assertEqual(entry['jsonPayload'], payload)
        self.assertEqual(next_token, NEW_TOKEN)

        # Verify the paging arguments forwarded to the GAX layer.
        projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, '')
        self.assertEqual(order_by, '')
        self.assertEqual(page_size, SIZE)
        self.assertEqual(options.page_token, TOKEN)
Пример #58
0
    def test_subscription_pull_explicit(self):
        """``subscription_pull`` with explicit options converts the pulled
        protobuf message to the JSON-API dict shape and forwards args."""
        import base64
        import datetime
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import _datetime_to_rfc3339
        NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
        NOW_PB = _datetime_to_pb_timestamp(NOW)
        NOW_RFC3339 = _datetime_to_rfc3339(NOW)
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        ACK_ID = 'DEADBEEF'
        MSG_ID = 'BEADCAFE'
        # Expected JSON-API shape of the returned message.
        MESSAGE = {
            'messageId': MSG_ID,
            'data': B64,
            'attributes': {'a': 'b'},
            'publishTime': NOW_RFC3339,
        }
        RECEIVED = [{'ackId': ACK_ID, 'message': MESSAGE}]
        message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB)
        response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)])
        gax_api = _GAXSubscriberAPI(_pull_response=response_pb)
        api = self._makeOne(gax_api)
        MAX_MESSAGES = 10

        received = api.subscription_pull(
            self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES)

        self.assertEqual(received, RECEIVED)
        # Verify the arguments forwarded to the GAX layer.
        sub_path, max_messages, return_immediately, options = (
            gax_api._pull_called_with)
        self.assertEqual(sub_path, self.SUB_PATH)
        self.assertEqual(max_messages, MAX_MESSAGES)
        self.assertTrue(return_immediately)
        self.assertIsNone(options)
Пример #59
0
    def create_span(
        self,
        name,
        span_id,
        display_name,
        start_time,
        end_time,
        parent_span_id=None,
        attributes=None,
        stack_trace=None,
        time_events=None,
        links=None,
        status=None,
        same_process_as_parent_span=None,
        child_span_count=None,
        retry=method.DEFAULT,
        timeout=method.DEFAULT,
    ):
        """
        Creates a new Span.

        Example:
            >>> from google.cloud import trace_v2
            >>>
            >>> client = trace_v2.TraceServiceClient()
            >>>
            >>> name = client.span_path('[PROJECT]', '[TRACE]', '[SPAN]')
            >>> span_id = ''
            >>> display_name = {}
            >>> start_time = {}
            >>> end_time = {}
            >>>
            >>> response = client.create_span(name, span_id, display_name,
                                              start_time, end_time)

        Args:
            name (str): The resource name of the span in the following format:

                ::

                    projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID]

                [TRACE_ID] is a unique identifier for a trace within a project.
                [SPAN_ID] is a unique identifier for a span within a trace,
                assigned when the span is created.
            span_id (str): The [SPAN_ID] portion of the span's resource name.
                The ID is a 16-character hexadecimal encoding of an 8-byte
                array.
            display_name (dict): A description of the span's operation
                (up to 128 bytes). Stackdriver Trace displays the description
                in the {% dynamic print site_values.console_name %}.
                For example, the display name can be a qualified method name
                or a file name and a line number where the operation is called.
                A best practice is to use the same display name within an
                application and at the same call point. This makes it easier to
                correlate spans in different traces.
                Contains two fields, value is the truncated name,
                truncatedByteCount is the number of bytes removed from the
                original string. If 0, then the string was not shortened.
            start_time (:class:`~datetime.datetime`):
                The start time of the span. On the client side, this is the
                time kept by the local machine where the span execution starts.
                On the server side, this is the time when the server's
                application handler starts running.
            end_time (:class:`~datetime.datetime`):
                The end time of the span. On the client side, this is the time
                kept by the local machine where the span execution ends. On the
                server side, this is the time when the server application
                handler stops running.
            parent_span_id (str): The [SPAN_ID] of this span's parent span.
                If this is a root span, then this field must be empty.
            attributes (dict): A set of attributes on the span. There is a
                limit of 32 attributes per span.
            stack_trace (dict):
                Stack trace captured at the start of the span.
                Contains two fields, stackFrames is a list of stack frames in
                this call stack, a maximum of 128 frames are allowed per
                StackFrame; stackTraceHashId is used to conserve network
                bandwidth for duplicate stack traces within a single trace.
            time_events (dict):
                The included time events. There can be up to 32 annotations
                and 128 message events per span.
            links (dict): A maximum of 128 links are allowed per Span.
            status (dict): An optional final status for this span.
            same_process_as_parent_span (bool): A highly recommended but not
                required flag that identifies when a trace crosses a process
                boundary. True when the parent_span belongs to the same process
                as the current span.
            child_span_count (int): An optional number of child spans that were
                generated while this span was active. If set, allows
                implementation to detect missing child spans.
            retry (Optional[google.api_core.retry.Retry]):  A retry object used
                to retry requests. If ``None`` is specified, requests will not
                be retried.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.

        Returns:
            A :class:`~google.cloud.trace_v2.types.Span` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                    failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                    to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Convert the dict type parameters to protobuf; ``None`` optionals
        # are left as ``None`` so the GAPIC layer omits them from the request.
        display_name = _dict_mapping_to_pb(display_name, "TruncatableString")
        start_time = _datetime_to_pb_timestamp(start_time)
        end_time = _datetime_to_pb_timestamp(end_time)

        if attributes is not None:
            attributes = _span_attrs_to_pb(attributes, "Attributes")

        if stack_trace is not None:
            stack_trace = _dict_mapping_to_pb(stack_trace, "StackTrace")

        if time_events is not None:
            time_events = _span_attrs_to_pb(time_events, "TimeEvents")

        if links is not None:
            links = _span_attrs_to_pb(links, "Links")

        if status is not None:
            status = _status_mapping_to_pb(status)

        if same_process_as_parent_span is not None:
            same_process_as_parent_span = _value_to_pb(
                same_process_as_parent_span, "BoolValue"
            )

        if child_span_count is not None:
            child_span_count = _value_to_pb(child_span_count, "Int32Value")

        # BUGFIX: ``retry`` and ``timeout`` were accepted (and documented)
        # but never forwarded, so callers could not actually control retry
        # or deadline behavior.  Forward them; the ``method.DEFAULT``
        # sentinel keeps prior behavior when callers omit them.
        return self._gapic_api.create_span(
            name=name,
            span_id=span_id,
            display_name=display_name,
            start_time=start_time,
            end_time=end_time,
            parent_span_id=parent_span_id,
            attributes=attributes,
            stack_trace=stack_trace,
            time_events=time_events,
            links=links,
            status=status,
            same_process_as_parent_span=same_process_as_parent_span,
            child_span_count=child_span_count,
            retry=retry,
            timeout=timeout,
        )
    def test_run_in_transaction_w_abort_w_retry_metadata_deadline(self):
        """Aborted commit whose server-suggested retry delay exceeds the deadline.

        The first ``Commit`` RPC fails with ``Aborted`` carrying ``RetryInfo``
        trailing metadata.  Because honoring the suggested delay would push
        past ``timeout_secs=1``, ``run_in_transaction`` must re-raise the
        ``Aborted`` error without sleeping and without re-running the
        unit of work.
        """
        import datetime
        from google.api_core.exceptions import Aborted
        from google.protobuf.duration_pb2 import Duration
        from google.rpc.error_details_pb2 import RetryInfo
        from google.cloud.spanner_v1.proto.spanner_pb2 import CommitResponse
        from google.cloud.spanner_v1.proto.transaction_pb2 import (
            Transaction as TransactionPB,
            TransactionOptions,
        )
        from google.cloud.spanner_v1.transaction import Transaction
        from google.cloud._helpers import UTC
        from google.cloud._helpers import _datetime_to_pb_timestamp

        TABLE_NAME = "citizens"
        COLUMNS = ["email", "first_name", "last_name", "age"]
        VALUES = [
            ["*****@*****.**", "Phred", "Phlyntstone", 32],
            ["*****@*****.**", "Bharney", "Rhubble", 31],
        ]
        TRANSACTION_ID = b"FACEDACE"
        RETRY_SECONDS = 1
        RETRY_NANOS = 3456
        transaction_pb = TransactionPB(id=TRANSACTION_ID)
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        now_pb = _datetime_to_pb_timestamp(now)
        response = CommitResponse(commit_timestamp=now_pb)
        # RetryInfo the server attaches to the Aborted error: "retry after
        # ~1s" — longer than the time remaining before the deadline.
        retry_info = RetryInfo(
            retry_delay=Duration(seconds=RETRY_SECONDS, nanos=RETRY_NANOS)
        )
        trailing_metadata = [
            ("google.rpc.retryinfo-bin", retry_info.SerializeToString())
        ]
        aborted = _make_rpc_error(Aborted, trailing_metadata=trailing_metadata)
        gax_api = self._make_spanner_api()
        gax_api.begin_transaction.return_value = transaction_pb
        # First commit aborts; the success response is queued but should
        # never be reached, since the deadline prevents a retry.
        gax_api.commit.side_effect = [aborted, response]
        database = self._make_database()
        database.spanner_api = gax_api
        session = self._make_one(database)
        session._session_id = self.SESSION_ID

        called_with = []

        def unit_of_work(txn, *args, **kw):
            # Records each invocation so we can assert it ran exactly once.
            called_with.append((txn, args, kw))
            txn.insert(TABLE_NAME, COLUMNS, VALUES)

        # Fake clock: 1.0 at loop start, 1.5 after the abort.  With
        # timeout_secs=1 the deadline is 2.0, and 1.5 + ~1s delay > 2.0,
        # so the retry is abandoned.
        def _time(_results=[1, 1.5]):
            return _results.pop(0)

        with mock.patch("time.time", _time):
            with mock.patch("time.sleep") as sleep_mock:
                with self.assertRaises(Aborted):
                    session.run_in_transaction(unit_of_work, "abc", timeout_secs=1)

        # The suggested delay must NOT have been slept on the abandoned path.
        sleep_mock.assert_not_called()

        # The unit of work ran exactly once, with the extra positional arg.
        self.assertEqual(len(called_with), 1)
        txn, args, kw = called_with[0]
        self.assertIsInstance(txn, Transaction)
        self.assertIsNone(txn.committed)
        self.assertEqual(args, ("abc",))
        self.assertEqual(kw, {})

        # Exactly one begin/commit round-trip, with the resource-prefix
        # metadata the Spanner client attaches to every RPC.
        expected_options = TransactionOptions(read_write=TransactionOptions.ReadWrite())
        gax_api.begin_transaction.assert_called_once_with(
            self.SESSION_NAME,
            expected_options,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )
        gax_api.commit.assert_called_once_with(
            self.SESSION_NAME,
            txn._mutations,
            transaction_id=TRANSACTION_ID,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )