Example #1
    def test_w_struct_value(self):
        from google.protobuf.struct_pb2 import Value

        STRING = 'STRING'
        PI = 3.1415926
        value_pb = Value()
        value_pb.struct_value.fields['string'].string_value = STRING
        value_pb.struct_value.fields['bool'].bool_value = True
        value_pb.struct_value.fields['number'].number_value = PI
        self.assertEqual(self._callFUT(value_pb),
                         {'string': STRING, 'bool': True, 'number': PI})
Example #2
    def test_w_empty_value(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        field_type = Type(code=TypeCode.STRING)
        value_pb = Value()

        with self.assertRaises(ValueError):
            self._callFUT(value_pb, field_type)
Example #3
def test_create_grpc_response_jsondata():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = {"output": "data"}
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") is None
    emptyValue = Value()
    assert sm.jsonData != emptyValue
Example #4
    def test_w_bool(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        VALUE = True
        field_type = Type(code=TypeCode.BOOL)
        value_pb = Value(bool_value=VALUE)

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
Example #5
    def test__update_state_metadata(self):
        from google.longrunning import operations_pb2
        from google.protobuf.any_pb2 import Any
        from google.protobuf.struct_pb2 import Value
        from google.cloud._testing import _Monkey
        from google.cloud import operation as MUT

        operation = self._make_one(None, None)
        self.assertIsNone(operation.metadata)

        val_pb = Value(number_value=1337)
        type_url = 'type.googleapis.com/%s' % (Value.DESCRIPTOR.full_name, )
        val_any = Any(type_url=type_url, value=val_pb.SerializeToString())
        operation_pb = operations_pb2.Operation(metadata=val_any)

        with _Monkey(MUT, _TYPE_URL_MAP={type_url: Value}):
            operation._update_state(operation_pb)

        self.assertEqual(operation.metadata, val_pb)
Example #6
    def test_w_array_empty(self):
        from google.protobuf.struct_pb2 import Value, ListValue
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        field_type = Type(code=TypeCode.ARRAY,
                          array_element_type=Type(code=TypeCode.INT64))
        value_pb = Value(list_value=ListValue(values=[]))

        self.assertEqual(self._callFUT(value_pb, field_type), [])
Example #7
def test_create_rest_response_jsondata():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_rest_datadef("ndarray", request_data)
    json_request = { "jsonData": datadef }
    raw_response = {"output": "data"}
    json_response = scu.construct_response_json(user_model, True, json_request, raw_response)
    assert "data" not in json_response
    emptyValue = Value()
    assert json_response["jsonData"] != emptyValue
Example #8
def test_create_grpc_response_customdata():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = any_pb2.Any(value=b"testdata")
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") is None
    emptyValue = Value()
    assert sm.customData != emptyValue
Example #9
    def test_w_unknown_type(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        field_type = Type(code=TypeCode.TYPE_CODE_UNSPECIFIED)
        value_pb = Value(string_value="Borked")

        with self.assertRaises(ValueError):
            self._callFUT(value_pb, field_type)
Example #10
    def test_w_float(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        VALUE = 3.14159
        field_type = Type(code=TypeCode.FLOAT64)
        value_pb = Value(number_value=VALUE)

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
Example #11
def get_prediction(instance):
    logging.info('Sending prediction request to AI Platform ...')
    try:
        pb_instance = json_format.ParseDict(instance, Value())
        response = aip_client.predict(endpoint=aip_endpoint_name,
                                      instances=[pb_instance])
        return list(response.predictions[0])
    except Exception as err:
        logging.error(f'Prediction request failed: {type(err)}: {err}')
        return None
Example #12
    def test_w_numeric(self):
        import decimal
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1.proto.type_pb2 import Type, NUMERIC

        VALUE = decimal.Decimal("99999999999999999999999999999.999999999")
        field_type = Type(code=NUMERIC)
        value_pb = Value(string_value=str(VALUE))

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
Example #13
def _merge_struct(lhs, rhs, type_):
    """Helper for '_merge_by_type'."""
    fields = type_.struct_type.fields
    lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)

    # Sanity check: If either list is empty, short-circuit.
    # This is effectively a no-op.
    if not len(lhs) or not len(rhs):
        return Value(list_value=ListValue(values=(lhs + rhs)))

    candidate_type = fields[len(lhs) - 1].type
    first = rhs.pop(0)
    if first.HasField(
            "null_value") or candidate_type.code in _UNMERGEABLE_TYPES:
        lhs.append(first)
    else:
        last = lhs.pop()
        lhs.append(_merge_by_type(last, first, candidate_type))
    return Value(list_value=ListValue(values=lhs + rhs))
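
A minimal standalone sketch (variable names chosen here for illustration) of how the helper above repackages merged chunks back into a Value wrapping a ListValue, using only the standard protobuf struct types:

from google.protobuf.struct_pb2 import ListValue, Value

# Two partial chunks of a list-typed column, as they might arrive.
lhs_values = [Value(string_value="abc"), Value(number_value=1)]
rhs_values = [Value(bool_value=True)]

# Concatenate and wrap the combined values back into a single Value.
merged = Value(list_value=ListValue(values=lhs_values + rhs_values))
assert len(merged.list_value.values) == 3
assert merged.list_value.values[0].string_value == "abc"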
Example #14
    def test_w_bytes(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        VALUE = b"Value"
        field_type = Type(code=TypeCode.BYTES)
        value_pb = Value(string_value=VALUE)

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
Example #15
    def test_w_int(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        VALUE = 12345
        field_type = Type(code=TypeCode.INT64)
        value_pb = Value(string_value=str(VALUE))

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
Example #16
    def test_write_entries_multiple(self):
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        json_payload = {'payload': 'PAYLOAD', 'type': 'json'}
        json_struct_pb = Struct(fields={
            key: Value(string_value=value)
            for key, value in json_payload.items()
        })
        self._write_entries_multiple_helper(json_payload, json_struct_pb)
Example #17
    def test_list_entries_with_paging(self):
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        payload = {'message': 'MESSAGE', 'weather': 'sunny'}
        struct_pb = Struct(fields={
            key: Value(string_value=value)
            for key, value in payload.items()
        })
        self._list_entries_with_paging_helper(payload, struct_pb)
Example #18
    def test_w_date(self):
        import datetime
        from google.protobuf.struct_pb2 import Value
        from google.cloud.proto.spanner.v1.type_pb2 import Type, DATE

        VALUE = datetime.date.today()
        field_type = Type(code=DATE)
        value_pb = Value(string_value=VALUE.isoformat())

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
Example #19
    def test_log_proto_explicit(self):
        import datetime
        from google.cloud.logging.resource import Resource
        from google.cloud.logging.entries import ProtobufEntry
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        message = Struct(fields={"foo": Value(bool_value=True)})
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        TRACE = "12345678-1234-5678-1234-567812345678"
        SPANID = "000000000000004a"
        REQUEST = {
            "requestMethod": METHOD,
            "requestUrl": URI,
            "status": STATUS
        }
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(type="gae_app",
                            labels={
                                "module_id": "default",
                                "version_id": "test"
                            })
        ENTRY = ProtobufEntry(
            payload=message,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_proto(
            message,
            labels=LABELS,
            insert_id=IID,
            severity=SEVERITY,
            http_request=REQUEST,
            timestamp=TIMESTAMP,
            resource=RESOURCE,
            trace=TRACE,
            span_id=SPANID,
            trace_sampled=True,
        )
        self.assertEqual(batch.entries, [ENTRY])
Example #20
    def test_commit_w_alternate_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud.logging.logger import Logger
        from google.cloud.logging.entries import _GLOBAL_RESOURCE

        TEXT = "This is the entry text"
        STRUCT = {"message": TEXT, "weather": "partly cloudy"}
        message = Struct(fields={"foo": Value(bool_value=True)})
        DEFAULT_LABELS = {"foo": "spam"}
        LABELS = {"foo": "bar", "baz": "qux"}
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {
            "requestMethod": METHOD,
            "requestUrl": URI,
            "status": STATUS
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = Logger("logger_name", client1, labels=DEFAULT_LABELS)
        ENTRIES = [
            {
                "textPayload": TEXT,
                "labels": LABELS,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
            {
                "jsonPayload": STRUCT,
                "severity": SEVERITY,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
            {
                "protoPayload": json.loads(MessageToJson(message)),
                "httpRequest": REQUEST,
                "resource": _GLOBAL_RESOURCE._to_dict(),
            },
        ]
        batch = self._make_one(logger, client=client1)

        batch.log_text(TEXT, labels=LABELS)
        batch.log_struct(STRUCT, severity=SEVERITY)
        batch.log_proto(message, http_request=REQUEST)
        batch.commit(client=client2)

        self.assertEqual(list(batch.entries), [])
        self.assertEqual(
            api._write_entries_called_with,
            (ENTRIES, logger.full_name, None, DEFAULT_LABELS),
        )
Example #21
def to_value(self: Message) -> Value:
    """Converts a message type to a :class:`~google.protobuf.struct_pb2.Value` object.

    Args:
      message: the message to convert

    Returns:
      the message as a :class:`~google.protobuf.struct_pb2.Value` object
    """
    tmp_dict = json_format.MessageToDict(self._pb)
    return json_format.ParseDict(tmp_dict, Value())
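
For context, a small self-contained sketch of the same dict-based conversion using only google.protobuf; the payload here is purely illustrative:

from google.protobuf import json_format
from google.protobuf.struct_pb2 import Value

payload = {"greeting": "hello", "nested": {"flag": True}}
value_pb = json_format.ParseDict(payload, Value())  # populates value_pb.struct_value
assert value_pb.struct_value.fields["greeting"].string_value == "hello"
assert value_pb.struct_value.fields["nested"].struct_value.fields["flag"].bool_value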
Example #22
    def test_log_proto_defaults(self):
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        message = Struct(fields={'foo': Value(bool_value=True)})
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_proto(message)
        self.assertEqual(batch.entries,
                         [('proto', message, None, None, None, None, None)])
Example #23
    def test_log_proto_defaults(self):
        from google.protobuf.struct_pb2 import Struct, Value

        message = Struct(fields={'foo': Value(bool_value=True)})
        connection = _Connection()
        CLIENT = _Client(project=self.PROJECT, connection=connection)
        logger = _Logger()
        batch = self._makeOne(logger, client=CLIENT)
        batch.log_proto(message)
        self.assertEqual(len(connection._requested), 0)
        self.assertEqual(batch.entries,
                         [('proto', message, None, None, None, None)])
Example #24
    def test_w_float_str(self):
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        VALUE = "3.14159"
        field_type = Type(code=TypeCode.FLOAT64)
        value_pb = Value(string_value=VALUE)
        expected_value = 3.14159

        self.assertEqual(self._callFUT(value_pb, field_type), expected_value)
Example #25
def update_finding(source_name):
    # [START update_finding]
    from google.cloud import securitycenter
    from google.protobuf.struct_pb2 import Value
    from google.protobuf import field_mask_pb2
    from google.protobuf.timestamp_pb2 import Timestamp

    client = securitycenter.SecurityCenterClient()
    # Only update the specific source property and event_time.  event_time
    # is required for updates.
    field_mask = field_mask_pb2.FieldMask(
        paths=["source_properties.s_value", "event_time"])
    value = Value()
    value.string_value = "new_string"

    # Set the update time to Now.  This must be some time greater then the
    # event_time on the original finding.
    now_proto = Timestamp()
    now_proto.GetCurrentTime()

    # source_name is the resource path for a source that has been
    # created previously (you can use list_sources to find a specific one).
    # Its format is:
    # source_name = "organizations/{organization_id}/sources/{source_id}"
    # e.g.:
    # source_name = "organizations/111122222444/sources/1234"
    finding_name = "{}/findings/samplefindingid2".format(source_name)
    updated_finding = client.update_finding(
        {
            "name": finding_name,
            "source_properties": {
                "s_value": value
            },
            "event_time": now_proto,
        },
        update_mask=field_mask,
    )

    print("New Source properties: {}, Event Time {}".format(
        updated_finding.source_properties,
        updated_finding.event_time.ToDatetime()))
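
The Timestamp handling above is standard protobuf well-known-type usage; a minimal sketch of just that part:

from google.protobuf.timestamp_pb2 import Timestamp

now_proto = Timestamp()
now_proto.GetCurrentTime()     # fills seconds/nanos with the current wall-clock time
print(now_proto.ToDatetime())  # converts back to a naive UTC datetime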
Example #26
def _merge_float64(lhs, rhs, type_):  # pylint: disable=unused-argument
    """Helper for '_merge_by_type'."""
    lhs_kind = lhs.WhichOneof("kind")
    if lhs_kind == "string_value":
        return Value(string_value=lhs.string_value + rhs.string_value)
    rhs_kind = rhs.WhichOneof("kind")
    array_continuation = (lhs_kind == "number_value"
                          and rhs_kind == "string_value"
                          and rhs.string_value == "")
    if array_continuation:
        return lhs
    raise Unmergeable(lhs, rhs, type_)
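
The merge helper relies on Value.WhichOneof("kind") to detect which member of the oneof is populated; a quick illustration of that protobuf API with illustrative literals:

from google.protobuf.struct_pb2 import Value

assert Value(string_value="3.14").WhichOneof("kind") == "string_value"
assert Value(number_value=2.5).WhichOneof("kind") == "number_value"
assert Value().WhichOneof("kind") is None  # nothing set yet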
Example #27
def create_training_pipeline_custom_job_sample(
    project: str,
    display_name: str,
    model_display_name: str,
    container_image_uri: str,
    base_output_directory_prefix: str,
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
):
    client_options = {"api_endpoint": api_endpoint}
    # Initialize client that will be used to create and send requests.
    # This client only needs to be created once, and can be reused for multiple requests.
    client = aiplatform.gapic.PipelineServiceClient(client_options=client_options)

    training_task_inputs_dict = {
        "workerPoolSpecs": [
            {
                "replicaCount": 1,
                "machineSpec": {"machineType": "n1-standard-4"},
                "containerSpec": {
                    # A working docker image can be found at gs://cloud-samples-data/ai-platform/mnist_tfrecord/custom_job
                    "imageUri": container_image_uri,
                    "args": [
                        # AIP_MODEL_DIR is set by the service according to baseOutputDirectory.
                        "--model_dir=$(AIP_MODEL_DIR)",
                    ],
                },
            }
        ],
        "baseOutputDirectory": {
            # The GCS location for outputs must be accessible by the project's AI Platform service account.
            "output_uri_prefix": base_output_directory_prefix
        },
    }
    training_task_inputs = json_format.ParseDict(training_task_inputs_dict, Value())

    training_task_definition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/custom_task_1.0.0.yaml"
    image_uri = "gcr.io/cloud-aiplatform/prediction/tf-cpu.1-15:latest"

    training_pipeline = {
        "display_name": display_name,
        "training_task_definition": training_task_definition,
        "training_task_inputs": training_task_inputs,
        "model_to_upload": {
            "display_name": model_display_name,
            "container_spec": {"image_uri": image_uri},
        },
    }
    parent = f"projects/{project}/locations/{location}"
    response = client.create_training_pipeline(
        parent=parent, training_pipeline=training_pipeline
    )
    print("response:", response)
Example #28
    def test_log_proto_w_explicit_client_labels_severity_httpreq(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName':
            'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME),
            'protoPayload':
            json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
                'labels': {},
            },
            'labels':
            LABELS,
            'insertId':
            IID,
            'severity':
            SEVERITY,
            'httpRequest':
            REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._make_one(self.LOGGER_NAME,
                                client=client1,
                                labels=DEFAULT_LABELS)

        logger.log_proto(message,
                         client=client2,
                         labels=LABELS,
                         insert_id=IID,
                         severity=SEVERITY,
                         http_request=REQUEST)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))
Example #29
    def test_w_json(self):
        import json
        from google.protobuf.struct_pb2 import Value
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeCode

        VALUE = {"id": 27863, "Name": "Anamika"}
        str_repr = json.dumps(VALUE, sort_keys=True, separators=(",", ":"))

        field_type = Type(code=TypeCode.JSON)
        value_pb = Value(string_value=str_repr)

        self.assertEqual(self._callFUT(value_pb, field_type), VALUE)

        VALUE = None
        str_repr = json.dumps(VALUE, sort_keys=True, separators=(",", ":"))

        field_type = Type(code=TypeCode.JSON)
        value_pb = Value(string_value=str_repr)

        self.assertEqual(self._callFUT(value_pb, field_type), {})
Example #30
    def test_commit_w_alternate_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud.logging.logger import Logger

        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {
            'foo': 'bar',
            'baz': 'qux',
        }
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = Logger('logger_name', client1, labels=DEFAULT_LABELS)
        RESOURCE = {'type': 'global'}
        ENTRIES = [
            {
                'textPayload': TEXT,
                'labels': LABELS
            },
            {
                'jsonPayload': STRUCT,
                'severity': SEVERITY
            },
            {
                'protoPayload': json.loads(MessageToJson(message)),
                'httpRequest': REQUEST
            },
        ]
        batch = self._make_one(logger, client=client1)

        batch.log_text(TEXT, labels=LABELS)
        batch.log_struct(STRUCT, severity=SEVERITY)
        batch.log_proto(message, http_request=REQUEST)
        batch.commit(client=client2)

        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS))