Example #1
def json_to_seldon_model_metadata(
    metadata_json: Dict,
) -> prediction_pb2.SeldonModelMetadata:
    """
    Parses JSON input to SeldonModelMetadata proto

    Parameters
    ----------
    metadata_json
        JSON input

    Returns
    -------
        SeldonModelMetadata
    """
    if metadata_json is None:
        metadata_json = {}
    metadata_proto = prediction_pb2.SeldonModelMetadata()
    try:
        json_format.ParseDict(metadata_json,
                              metadata_proto,
                              ignore_unknown_fields=True)
        return metadata_proto
    except json_format.ParseError as pbExc:
        raise SeldonMicroserviceException(f"Invalid metadata: {pbExc}")
Example #2
def parse_from_mongo(mongo_dict, proto):
    """Parse a Protobuf from a dict coming from MongoDB.

    Args:
        mongo_dict: a dict coming from MongoDB, or None. This dict will be
            modified by the function: it removes all keys prefixed by "_"
            and converts datetime objects to ISO strings.
        proto: a protobuf message to merge data into.
    Returns: a boolean indicating whether the input had actual data.
    """
    if mongo_dict is None:
        return False
    to_delete = [k for k in mongo_dict if k.startswith('_')]
    for key in to_delete:
        del mongo_dict[key]
    _convert_datetimes_to_string(mongo_dict)
    try:
        json_format.ParseDict(mongo_dict, proto, ignore_unknown_fields=True)
    except json_format.ParseError as error:
        logging.warning(
            'Error %s while parsing a JSON dict for proto type %s:\n%s', error,
            proto.__class__.__name__, mongo_dict)
        return False
    return True
Example #3
def parse_from_mongo(mongo_dict: Optional[dict[str, Any]],
                     proto: message.Message,
                     id_field: Optional[str] = None) -> bool:
    """Parse a Protobuf from a dict coming from MongoDB.

    Args:
        mongo_dict: a dict coming from MongoDB, or None. This dict will be
            modified by the function: it removes all keys prefixed by "_"
            and converts datetime objects to ISO strings.
        proto: a protobuf message to merge data into.
        id_field (optional): a field in the proto where we wish to put the
            Mongo ID. It must be a string field.
    Returns: a boolean indicating whether the input had actual data.
    """

    if mongo_dict is None:
        return False
    message_id = str(mongo_dict.pop('_id', ''))
    to_delete = [k for k in mongo_dict if k.startswith('_')]
    for key in to_delete:
        del mongo_dict[key]
    _convert_datetimes_to_string(mongo_dict)
    try:
        json_format.ParseDict(mongo_dict,
                              proto,
                              ignore_unknown_fields=not _IS_TEST_ENV)
    except json_format.ParseError as error:
        if _IS_TEST_ENV:
            raise error
        logging.warning(
            'Error %s while parsing a JSON dict for proto type %s:\n%s', error,
            proto.__class__.__name__, mongo_dict)
        return False
    if message_id and id_field:
        setattr(proto, id_field, message_id)
    return True
Example #4
def create_data_labeling_job_sample(
    project: str,
    display_name: str,
    dataset_name: str,
    instruction_uri: str,
    inputs_schema_uri: str,
    annotation_spec: str,
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
):
    # The AI Platform services require regional API endpoints.
    client_options = {"api_endpoint": api_endpoint}
    # Initialize client that will be used to create and send requests.
    # This client only needs to be created once, and can be reused for multiple requests.
    client = aiplatform.gapic.JobServiceClient(client_options=client_options)
    inputs_dict = {"annotation_specs": [annotation_spec]}
    inputs = json_format.ParseDict(inputs_dict, Value())

    data_labeling_job = {
        "display_name": display_name,
        # Full resource name: projects/{project_id}/locations/{location}/datasets/{dataset_id}
        "datasets": [dataset_name],
        # labeler_count must be 1, 3, or 5
        "labeler_count": 1,
        "instruction_uri": instruction_uri,
        "inputs_schema_uri": inputs_schema_uri,
        "inputs": inputs,
        "annotation_labels": {
            "aiplatform.googleapis.com/annotation_set_name": "my_test_saved_query"
        },
    }
    parent = f"projects/{project}/locations/{location}"
    response = client.create_data_labeling_job(
        parent=parent, data_labeling_job=data_labeling_job
    )
    print("response:", response)
Example #5
    def to_proto(self):
        d = {
            'sync_interval': self.train_options.sync_interval,
            'do_monitoring': self.train_options.do_monitoring,
            'replace_model': self.train_options.replace_model,
            'training_data_uri': self.training_data_uri,
            'training_output_uri': self.training_output_uri,
            'model_uri': self.model_uri,
            'debug': self.debug,
            'kc_config': self.kc_config
        }

        conf = json_format.ParseDict(
            d, BackendConfigMsg.KerasClassificationConfig())

        msg = BackendConfigMsg(backend_type=rv.KERAS_CLASSIFICATION,
                               keras_classification_config=conf)

        if self.pretrained_model_uri:
            msg.MergeFrom(
                BackendConfigMsg(
                    pretrained_model_uri=self.pretrained_model_uri))

        return msg
Example #6
    def __init__(
        self,
        job_file='test_job.yaml',
        client=None,
        config=None,
        pool=None,
        job_config=None,
        options=[],
    ):

        self.config = config or IntegrationTestConfig()
        self.client = client or Client()
        self.pool = pool or Pool(self.config, self.client)
        self.job_id = None
        if job_config is None:
            job_config_dump = load_test_config(job_file)
            job_config = job.JobConfig()
            json_format.ParseDict(job_config_dump, job_config)

        # apply options
        for o in options:
            o(job_config)

        self.job_config = job_config
Example #7
def test_auto_rollback_reduce_instances(stateless_job):
    stateless_job.create()
    stateless_job.wait_for_state(goal_state="RUNNING")

    job_spec_dump = load_test_config(
        UPDATE_STATELESS_JOB_BAD_HEALTH_CHECK_SPEC)
    updated_job_spec = JobSpec()
    json_format.ParseDict(job_spec_dump, updated_job_spec)

    # increase the instance count
    updated_job_spec.instance_count = stateless_job.job_spec.instance_count + 3

    update = StatelessUpdate(
        stateless_job,
        updated_job_spec=updated_job_spec,
        roll_back_on_failure=True,
        max_instance_attempts=1,
        max_failure_instances=1,
        batch_size=1,
    )
    update.create()
    update.wait_for_state(goal_state="ROLLED_BACK")
    assert (len(
        stateless_job.query_pods()) == stateless_job.job_spec.instance_count)
Example #8
    def update(self, new_job_file):
        """
        updates a job
        :param new_job_file: The job config file used for updating
        """
        # wait for job manager leader
        self.wait_for_jobmgr_available()
        job_config_dump = load_test_config(new_job_file)
        new_job_config = job.JobConfig()
        json_format.ParseDict(job_config_dump, new_job_config)

        request = job.UpdateRequest(
            id=peloton.JobID(value=self.job_id), config=new_job_config
        )
        resp = self.client.job_svc.Update(
            request,
            metadata=self.client.jobmgr_metadata,
            timeout=self.config.rpc_timeout_sec,
        )
        assert not resp.HasField("error")

        # update the config
        self.job_config = new_job_config
        log.info("updated job %s", self.job_id)
Example #9
    def TransformInput(self, request, context):
        if hasattr(self.user_model, "transform_input_grpc"):
            return self.user_model.transform_input_grpc(request)
        else:
            features = get_data_from_proto(request)
            datadef = request.data
            data_type = request.WhichOneof("data_oneof")

            transformed = transform_input(self.user_model, features,
                                          datadef.names)

            # Construct metadata
            meta = prediction_pb2.Meta()
            metaJson = {}
            tags = get_custom_tags(self.user_model)
            if tags:
                metaJson["tags"] = tags
            metrics = get_custom_metrics(self.user_model)
            if metrics:
                metaJson["metrics"] = metrics
            json_format.ParseDict(metaJson, meta)

            if isinstance(transformed, np.ndarray) or data_type == "data":
                transformed = np.array(transformed)
                feature_names = get_feature_names(self.user_model,
                                                  datadef.names)
                if data_type == "data":
                    default_data_type = request.data.WhichOneof("data_oneof")
                else:
                    default_data_type = "tensor"
                data = array_to_grpc_datadef(transformed, feature_names,
                                             default_data_type)
                return prediction_pb2.SeldonMessage(data=data, meta=meta)
            else:
                return prediction_pb2.SeldonMessage(binData=transformed,
                                                    meta=meta)
Example #10
def test__create_update_add_instances_with_bad_config(stateless_job):
    stateless_job.create()
    stateless_job.wait_for_state(goal_state="RUNNING")

    job_spec_dump = load_test_config(UPDATE_STATELESS_JOB_BAD_SPEC)
    updated_job_spec = JobSpec()
    json_format.ParseDict(job_spec_dump, updated_job_spec)

    updated_job_spec.instance_count = stateless_job.job_spec.instance_count + 3

    update = StatelessUpdate(
        stateless_job,
        batch_size=1,
        updated_job_spec=updated_job_spec,
        max_failure_instances=1,
        max_instance_attempts=1,
    )
    update.create()
    update.wait_for_state(goal_state="FAILED", failed_state="SUCCEEDED")

    # only one instance should be added
    assert (len(
        stateless_job.query_pods()) == stateless_job.job_spec.instance_count +
            1)
Example #11
    def post(self):
        """Add dependent concept to database."""
        request, error_message = flask_request_response.message_request(
            _api_intput_pb2.AddDependentConcept, ADD_DEPENDENT_CONCEPT_API,
            POST_REQUEST)

        if error_message is not None:
            return flask_request_response.error_response(
                [error_message['err_message']], ADD_DEPENDENT_CONCEPT_API,
                POST_REQUEST)

        try:
            dependent_concept_response = add_dependent_concept_query_response(
                request.concept_key, request.dependent_concept_name,
                request.dependent_concept_key, request.created_by)
            response = _api_output_pb2.AddDependentConcept()
            dependent_concept_result = _json_format.ParseDict(
                dependent_concept_response, response)
            return flask_request_response.message_response(
                dependent_concept_result, ADD_DEPENDENT_CONCEPT_API,
                POST_REQUEST, 200)
        except Exception as err:
            return flask_request_response.error_response(
                [str(err)], ADD_DEPENDENT_CONCEPT_API, POST_REQUEST)
Example #12
    def with_template(self, template):
        """Use a template as the base for configuring Keras Classification.

        Args:
            template: dict, string or uri
        """
        template_json = None
        if type(template) is dict:
            msg = json_format.ParseDict(template, PipelineConfig())

            template_json = json_format.MessageToDict(msg)
        else:
            # Try parsing the string as a message, on fail assume it's a URI
            msg = None
            try:
                msg = json_format.Parse(template, PipelineConfig())
            except json_format.ParseError:
                msg = json_format.Parse(file_to_str(template),
                                        PipelineConfig())
            template_json = json_format.MessageToDict(msg)

        b = deepcopy(self)
        b.config['kc_config'] = template_json
        return b
Example #13
def test_proto_gets_meta():
    user_object = UserObject(ret_meta=True)
    app = SeldonModelGRPC(user_object)
    arr = np.array([1, 2])
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(
            shape=(2, 1),
            values=arr
        )
    )
    meta = prediction_pb2.Meta()
    metaJson = {"puid":"abc"}
    json_format.ParseDict(metaJson, meta)
    request = prediction_pb2.SeldonMessage(data=datadef, meta=meta)
    resp = app.Predict(request, None)
    jStr = json_format.MessageToJson(resp)
    j = json.loads(jStr)
    print(j)
    assert j["meta"]["tags"] == {"inc_meta":{"puid":"abc"}}
    # add default type
    j["meta"]["metrics"][0]["type"] = "COUNTER"
    assert j["meta"]["metrics"] == user_object.metrics()
    assert j["data"]["tensor"]["shape"] == [2, 1]
    assert j["data"]["tensor"]["values"] == [1, 2]
Example #14
  def to_entry(self):
    if self.is_raw_entry():
      return RawEntry(
                entryname=self.entryname,
                type_url='',
                raw_bytes=self.msg_dict['protobag_raw_entry_bytes'],
                serdes=self.serdes)
    else:
      if self.descriptor_data:
        # Maybe use this data to facilitate message parsing below
        self.serdes.register_descriptor_data(
          self.type_url,
          self.descriptor_data)
      msg_cls = self.serdes.get_msg_cls_for_type(self.type_url)
      msg = msg_cls()
      json_format.ParseDict(self.msg_dict, msg)

      if self.is_stamped_entry():
        return StampedEntry(
                entryname=self.entryname,
                type_url=self.type_url,
                msg=msg,
                
                topic=self.topic,
                timestamp=self.timestamp,
                
                serdes=self.serdes,
                descriptor_data=self.descriptor_data)
      else:
        return MessageEntry(
                entryname=self.entryname,
                type_url=self.type_url,
                msg=msg,
                
                serdes=self.serdes,
                descriptor_data=self.descriptor_data)
Example #15
def test_model_tftensor_ok():
    user_object = UserObject()
    seldon_metrics = SeldonMetrics()
    app = get_rest_microservice(user_object, seldon_metrics)
    client = app.test_client()
    arr = np.array([1, 2])
    datadef = prediction_pb2.DefaultData(tftensor=tf.make_tensor_proto(arr))
    request = prediction_pb2.SeldonMessage(data=datadef)
    jStr = json_format.MessageToJson(request)
    rv = client.get("/predict?json=" + jStr)
    j = json.loads(rv.data)
    logging.info(j)
    assert rv.status_code == 200
    assert j["meta"]["tags"] == {"mytag": 1}
    assert j["meta"]["metrics"][0]["key"] == user_object.metrics()[0]["key"]
    assert j["meta"]["metrics"][0]["value"] == user_object.metrics(
    )[0]["value"]
    assert "tftensor" in j["data"]
    tfp = TensorProto()
    json_format.ParseDict(j["data"].get("tftensor"),
                          tfp,
                          ignore_unknown_fields=False)
    arr2 = tf.make_ndarray(tfp)
    assert np.array_equal(arr, arr2)
Example #16
    def convert_record_to_proto(self, airtable_record: airtable.Record[Mapping[str, Any]]) \
            -> Tuple[message.Message, str]:
        """Convert an airtable record to an actual proto message.

        Also runs all the necessary checks on it.
        """

        fields = self.convert_record(airtable_record)
        proto = self.proto_type()
        _id = fields.pop('_id', None)
        try:
            json_format.ParseDict(fields, proto)
        except json_format.ParseError as error:
            raise ValueError(
                f'Error while parsing:\n{json.dumps(fields, indent=2)}'
            ) from error
        if _has_any_check_error(proto,
                                _id,
                                self.checkers,
                                _BEFORE_TRANSLATION_CHECKERS,
                                locale='fr'):
            # Error messages are already logged in the function.
            raise ValueError()
        return proto, _id
Example #17
 def testParseDict(self):
     expected = 12345
     js_dict = {'int32Value': expected}
     message = json_format_proto3_pb2.TestMessage()
     json_format.ParseDict(js_dict, message)
     self.assertEqual(expected, message.int32_value)
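Several of the examples here (e.g. #1, #2, #26) pass ignore_unknown_fields=True. As a minimal sketch of why, using google.protobuf.type_pb2.Type purely as a convenient stand-in message: by default json_format.ParseDict raises ParseError on keys the message does not define, and the flag makes it drop them instead.

from google.protobuf import json_format, type_pb2

msg = type_pb2.Type()
# Unknown keys are silently dropped when ignore_unknown_fields=True.
json_format.ParseDict({"name": "MyType", "no_such_field": 1}, msg,
                      ignore_unknown_fields=True)
assert msg.name == "MyType"

# Without the flag, the same unknown key raises json_format.ParseError.
try:
    json_format.ParseDict({"no_such_field": 1}, type_pb2.Type())
except json_format.ParseError as err:
    print(err)  # ... has no field named "no_such_field"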
Example #18
_TEST_MODEL_TYPE_MOBILE = "MOBILE_TF_LOW_LATENCY_1"
_TEST_PREDICTION_TYPE_ICN = "classification"
_TEST_PREDICTION_TYPE_IOD = "object_detection"

_TEST_DATASET_NAME = "test-dataset-name"
_TEST_MODEL_DISPLAY_NAME = "model-display-name"
_TEST_MODEL_ID = "98777645321"

_TEST_LABELS = {"key": "value"}
_TEST_MODEL_LABELS = {"model_key": "model_value"}

_TEST_TRAINING_TASK_INPUTS = json_format.ParseDict(
    {
        "modelType": "CLOUD",
        "budgetMilliNodeHours": _TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
        "multiLabel": False,
        "disableEarlyStopping": _TEST_TRAINING_DISABLE_EARLY_STOPPING,
    },
    struct_pb2.Value(),
)

_TEST_TRAINING_TASK_INPUTS_WITH_BASE_MODEL = json_format.ParseDict(
    {
        "modelType": "CLOUD",
        "budgetMilliNodeHours": _TEST_TRAINING_BUDGET_MILLI_NODE_HOURS,
        "multiLabel": False,
        "disableEarlyStopping": _TEST_TRAINING_DISABLE_EARLY_STOPPING,
        "baseModelId": _TEST_MODEL_ID,
    },
    struct_pb2.Value(),
)
Example #19
def op_proto(json_dict: Dict) -> v2.program_pb2.Operation:
    op = v2.program_pb2.Operation()
    json_format.ParseDict(json_dict, op)
    return op
Example #20
 def _from_json_dict_(cls, metrics: str, **kwargs) -> 'Calibration':
     """Magic method for the JSON serialization protocol."""
     metric_proto = v2.metrics_pb2.MetricsSnapshot()
     return cls(json_format.ParseDict(metrics, metric_proto))
Example #21
def construct_response(
    user_model: SeldonComponent,
    is_request: bool,
    client_request: prediction_pb2.SeldonMessage,
    client_raw_response: Union[np.ndarray, str, bytes, dict],
    meta: dict = None,
    custom_metrics: List[Dict] = None,
) -> prediction_pb2.SeldonMessage:
    """

    Parameters
    ----------
    user_model
       Client user class
    is_request
       Whether this is part of the request flow as opposed to the response flow
    client_request
       The request received
    client_raw_response
       The raw client response from their model
    meta
       Optional dict with "tags" and "metrics" carried over from the request
    custom_metrics
       Optional list of additional metrics to merge into the response meta

    Returns
    -------
       A SeldonMessage proto response

    """
    data_type = client_request.WhichOneof("data_oneof")
    meta_pb = prediction_pb2.Meta()
    meta_json: Dict = {}

    if meta:
        tags = meta.get("tags", {})
        metrics = meta.get("metrics", [])
    else:
        tags = {}
        metrics = []
    custom_tags = client_custom_tags(user_model)
    if custom_tags:
        tags.update(custom_tags)
    if custom_metrics:
        metrics.extend(custom_metrics)
    if tags:
        meta_json["tags"] = tags
    if metrics:
        meta_json["metrics"] = metrics
    if client_request.meta:
        if client_request.meta.puid:
            meta_json["puid"] = client_request.meta.puid
    json_format.ParseDict(meta_json, meta_pb)
    if isinstance(client_raw_response, (np.ndarray, list)):
        client_raw_response = np.array(client_raw_response)
        if is_request:
            names = client_feature_names(user_model, client_request.data.names)
        else:
            names = client_class_names(user_model, client_raw_response)
        if (
                data_type == "data"
        ):  # If request is using defaultdata then return what was sent if is numeric response else ndarray
            if np.issubdtype(client_raw_response.dtype, np.number):
                default_data_type = client_request.data.WhichOneof(
                    "data_oneof")
            else:
                default_data_type = "ndarray"
        else:  # If numeric response return as tensor else return as ndarray
            if np.issubdtype(client_raw_response.dtype, np.number):
                default_data_type = "tensor"
            else:
                default_data_type = "ndarray"
        data = array_to_grpc_datadef(default_data_type, client_raw_response,
                                     names)
        return prediction_pb2.SeldonMessage(data=data, meta=meta_pb)
    elif isinstance(client_raw_response, str):
        return prediction_pb2.SeldonMessage(strData=client_raw_response,
                                            meta=meta_pb)
    elif isinstance(client_raw_response, dict):
        jsonDataResponse = json_format.ParseDict(
            client_raw_response, prediction_pb2.SeldonMessage().jsonData)
        return prediction_pb2.SeldonMessage(jsonData=jsonDataResponse,
                                            meta=meta_pb)
    elif isinstance(client_raw_response, (bytes, bytearray)):
        return prediction_pb2.SeldonMessage(binData=client_raw_response,
                                            meta=meta_pb)
    else:
        raise SeldonMicroserviceException(
            f"Unknown data type returned as payload: {client_raw_response}")
Example #22
def _create_serving_spec(payload: Dict[Text, Any]):
    result = infra_validator_pb2.ServingSpec()
    json_format.ParseDict(payload, result)
    return result
Example #23
    },
    {
        "numeric": {
            "column_name": "PhotoAmt"
        }
    },
]

MODEL_ID = os.environ.get("MODEL_ID", "test-model-id")
MODEL_ARTIFACT_URI = os.environ.get("MODEL_ARTIFACT_URI",
                                    "path_to_folder_with_model_artifacts")
MODEL_NAME = f"projects/{PROJECT_ID}/locations/{REGION}/models/{MODEL_ID}"
JOB_DISPLAY_NAME = f"temp_create_batch_prediction_job_test_{uuid4()}"
BIGQUERY_SOURCE = f"bq://{PROJECT_ID}.test_iowa_liquor_sales_forecasting_us.2021_sales_predict"
GCS_DESTINATION_PREFIX = "gs://test-vertex-ai-bucket-us/output"
MODEL_PARAMETERS = json_format.ParseDict({}, Value())

ENDPOINT_CONF = {
    "display_name": f"endpoint_test_{uuid4()}",
}
DEPLOYED_MODEL = {
    # format: 'projects/{project}/locations/{location}/models/{model}'
    'model': f"projects/{PROJECT_ID}/locations/{REGION}/models/{MODEL_ID}",
    'display_name': f"temp_endpoint_test_{uuid4()}",
    "dedicated_resources": {
        "machine_spec": {
            "machine_type": "n1-standard-2",
            "accelerator_type":
            aiplatform.gapic.AcceleratorType.NVIDIA_TESLA_K80,
            "accelerator_count": 1,
        },
Example #24
def _create_local_docker_config(payload: Dict[Text, Any]):
    config = LocalDockerConfig()
    json_format.ParseDict(payload, config)
    return config
Example #25
    def __init__(self,
                 document: Optional[DocumentSourceType] = None,
                 field_resolver: Dict[str, str] = None,
                 copy: bool = False,
                 **kwargs):
        """

        :param document: the document to construct from. If ``bytes`` is given
                then deserialize a :class:`DocumentProto`; ``dict`` is given then
                parse a :class:`DocumentProto` from it; ``str`` is given, then consider
                it as a JSON string and parse a :class:`DocumentProto` from it; finally,
                one can also give `DocumentProto` directly, then depending on the ``copy``,
                it builds a view or a copy from it.
        :param copy: when ``document`` is given as a :class:`DocumentProto` object, build a
                view (i.e. weak reference) from it or a deep copy from it.
        :param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
                names defined in Protobuf. This is only used when the given ``document`` is
                a JSON string or a Python dict.
        :param kwargs: other parameters to be set _after_ the document is constructed

        .. note::

            When ``document`` is a JSON string or Python dictionary object, the constructor will only map the values
            from known fields defined in Protobuf, all unknown fields are mapped to ``document.tags``. For example,

            .. highlight:: python
            .. code-block:: python

                d = Document({'id': '123', 'hello': 'world', 'tags': {'good': 'bye'}})

                assert d.id == '123'  # true
                assert d.tags['hello'] == 'world'  # true
                assert d.tags['good'] == 'bye'  # true
        """
        self._pb_body = jina_pb2.DocumentProto()
        try:
            if isinstance(document, jina_pb2.DocumentProto):
                if copy:
                    self._pb_body.CopyFrom(document)
                else:
                    self._pb_body = document
            elif isinstance(document, (dict, str)):
                if isinstance(document, str):
                    document = json.loads(document)

                if field_resolver:
                    document = {
                        field_resolver.get(k, k): v
                        for k, v in document.items()
                    }

                user_fields = set(document.keys())
                if _document_fields.issuperset(user_fields):
                    json_format.ParseDict(document, self._pb_body)
                else:
                    _intersect = _document_fields.intersection(user_fields)
                    _remainder = user_fields.difference(_intersect)
                    if _intersect:
                        json_format.ParseDict(
                            {k: document[k]
                             for k in _intersect}, self._pb_body)
                    if _remainder:
                        self._pb_body.tags.update(
                            {k: document[k]
                             for k in _remainder})
            elif isinstance(document, bytes):
                # Parsing directly from a binary string can give false positives;
                # fortunately protobuf emits a warning when the parsing seems to
                # go wrong. The context manager below converts that warning into
                # an exception and raises it properly.
                with warnings.catch_warnings():
                    warnings.filterwarnings('error',
                                            'Unexpected end-group tag',
                                            category=RuntimeWarning)
                    try:
                        self._pb_body.ParseFromString(document)
                    except RuntimeWarning as ex:
                        raise BadDocType(
                            f'fail to construct a document from {document}'
                        ) from ex
            elif isinstance(document, Document):
                if copy:
                    self._pb_body.CopyFrom(document.proto)
                else:
                    self._pb_body = document.proto
            elif document is not None:
                # note ``None`` is not considered as a bad type
                raise ValueError(f'{typename(document)} is not recognizable')
        except Exception as ex:
            raise BadDocType(
                f'fail to construct a document from {document}, '
                f'if you are trying to set the content '
                f'you may use "Document(content=your_content)"') from ex

        if self._pb_body.id is None or not self._pb_body.id:
            self.id = random_identity(use_uuid1=True)

        self.set_attrs(**kwargs)
        self._mermaid_id = random_identity()  #: for mermaid visualize id
Example #26
def parse_vulnerability_from_dict(data, key_path=None):
  """Parse vulnerability from dict."""
  data = _get_nested_vulnerability(data, key_path)
  vulnerability = vulnerability_pb2.Vulnerability()
  json_format.ParseDict(data, vulnerability, ignore_unknown_fields=True)
  return vulnerability
Example #27
def dict_to_struct(dict_obj):
    return json_format.ParseDict(dict_obj, struct_pb2.Struct())
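json_format.ParseDict returns the same message instance it was handed, which is what makes one-liners like dict_to_struct above possible. A small usage sketch (note that plain JSON numbers round-trip through struct_pb2.Struct as floats):

from google.protobuf import json_format, struct_pb2

struct = json_format.ParseDict({"a": 1, "b": ["x", "y"]}, struct_pb2.Struct())
assert json_format.MessageToDict(struct) == {"a": 1.0, "b": ["x", "y"]}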
Example #28
 def from_proto_dict(self, proto: Dict) -> ops.GateOperation:
     """Turns a cirq.api.google.v2.Operation proto into a GateOperation."""
     msg = v2.program_pb2.Operation()
     json_format.ParseDict(proto, msg)
     return self.from_proto(msg)
Example #29
def op_proto(json):
    """Json to proto."""
    op = program_pb2.Operation()
    json_format.ParseDict(json, op)
    return op
Example #30
 def init(cls, request, context, rest_only=None):
     time_created = datetime.datetime.now()
     metadata = None
     if context is not None:
         metadata = request.bucket
     else:
         metadata, rest_only = cls.__preprocess_rest(
             json.loads(request.data))
         metadata = json_format.ParseDict(metadata, resources_pb2.Bucket())
     cls.__validate_bucket_name(metadata.name, context)
     default_projection = CommonEnums.Projection.NO_ACL
     if len(metadata.acl) != 0 or len(metadata.default_object_acl) != 0:
         default_projection = CommonEnums.Projection.FULL
     is_uniform = metadata.iam_configuration.uniform_bucket_level_access.enabled
     metadata.iam_configuration.uniform_bucket_level_access.enabled = False
     if len(metadata.acl) == 0:
         predefined_acl = utils.acl.extract_predefined_acl(
             request, False, context)
         if (predefined_acl == CommonEnums.PredefinedBucketAcl.
                 PREDEFINED_BUCKET_ACL_UNSPECIFIED):
             predefined_acl = (
                 CommonEnums.PredefinedBucketAcl.BUCKET_ACL_PROJECT_PRIVATE)
         elif predefined_acl == "":
             predefined_acl = "projectPrivate"
         elif is_uniform:
             utils.error.invalid(
                 "Predefined ACL with uniform bucket level access enabled",
                 context)
         cls.__insert_predefined_acl(metadata, predefined_acl, context)
     if len(metadata.default_object_acl) == 0:
         predefined_default_object_acl = utils.acl.extract_predefined_default_object_acl(
             request, context)
         if (predefined_default_object_acl == CommonEnums.
                 PredefinedObjectAcl.PREDEFINED_OBJECT_ACL_UNSPECIFIED):
             predefined_default_object_acl = (
                 CommonEnums.PredefinedObjectAcl.OBJECT_ACL_PROJECT_PRIVATE)
         elif predefined_default_object_acl == "":
             predefined_default_object_acl = "projectPrivate"
         elif is_uniform:
             utils.error.invalid(
                 "Predefined Default Object ACL with uniform bucket level access enabled",
                 context,
             )
         cls.__insert_predefined_default_object_acl(
             metadata, predefined_default_object_acl, context)
     cls.__enrich_acl(metadata)
     metadata.iam_configuration.uniform_bucket_level_access.enabled = is_uniform
     metadata.id = metadata.name
     metadata.project_number = int(utils.acl.PROJECT_NUMBER)
     metadata.metageneration = 0
     metadata.etag = hashlib.md5(metadata.name.encode("utf-8")).hexdigest()
     metadata.time_created.FromDatetime(time_created)
     metadata.updated.FromDatetime(time_created)
     metadata.owner.entity = utils.acl.get_project_entity("owners", context)
     metadata.owner.entity_id = hashlib.md5(
         metadata.owner.entity.encode("utf-8")).hexdigest()
     if rest_only is None:
         rest_only = {}
     return (
         cls(metadata, {}, cls.__init_iam_policy(metadata, context),
             rest_only),
         utils.common.extract_projection(request, default_projection,
                                         context),
     )