def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any:
    """
    Convert a Feast field value Proto into its native Python representation.

    Args:
        field_value_proto: Field value Proto

    Returns:
        Python native type representation/version of the given field_value_proto

    Raises:
        TypeError: If the proto carries a value type with no known conversion.
    """
    value_dict = MessageToDict(field_value_proto)
    # The dict holds exactly one populated oneof field; dispatch on its name.
    for type_key, raw in value_dict.items():
        if type_key == "int64Val":
            # MessageToDict renders int64 as a string; cast it back.
            return int(raw)
        if type_key == "bytesVal":
            # NOTE(review): MessageToDict emits bytes as a base64 string —
            # bytes(<str>) without an encoding raises; confirm against callers.
            return bytes(raw)
        if type_key in ("int64ListVal", "int32ListVal"):
            return [int(item) for item in raw["val"]]
        if type_key in ("floatListVal", "doubleListVal"):
            return [float(item) for item in raw["val"]]
        if type_key == "stringListVal":
            return [str(item) for item in raw["val"]]
        if type_key == "bytesListVal":
            return [bytes(item) for item in raw["val"]]
        if type_key == "boolListVal":
            return [bool(item) for item in raw["val"]]
        if type_key in ("int32Val", "floatVal", "doubleVal", "stringVal", "boolVal"):
            # Already the right Python type after JSON conversion.
            return raw
        # Any other field name is an unsupported value type.
        raise TypeError(
            f"Casting to Python native type for type {type_key} failed. "
            f"Type {type_key} not found")
def __mermaid_str__(self):
    """Render this document (and nested chunks/matches) as mermaid class-diagram lines."""
    from google.protobuf.json_format import MessageToDict

    lines = []
    content = MessageToDict(self._pb_body, preserving_proto_field_name=True)
    node_id = f'{self._mermaid_id[:3]}~Document~'

    # Emit one edge per nested document, then recurse so children render themselves.
    for idx, child in enumerate(self.chunks):
        lines.append(
            f'{node_id} --> "{idx + 1}/{len(self.chunks)}" {child._mermaid_id[:3]}~Document~: chunks'
        )
        lines.append(child.__mermaid_str__())
    for idx, child in enumerate(self.matches):
        lines.append(
            f'{node_id} ..> "{idx + 1}/{len(self.matches)}" {child._mermaid_id[:3]}~Document~: matches'
        )
        lines.append(child.__mermaid_str__())

    # Children are already drawn as edges above, not as class attributes.
    content.pop('chunks', None)
    content.pop('matches', None)

    if content:
        lines.append(f'class {node_id}{{')
        for field_name, field_value in content.items():
            if isinstance(field_value, (str, int, float, bytes)):
                # Scalars: show a truncated preview of the value.
                lines.append(f'+{field_name} {str(field_value)[:10]}')
            else:
                # Composites: show the attribute's Python type instead.
                lines.append(f'+{field_name}({type(getattr(self, field_name, field_value))})')
        lines.append('}')
    return '\n'.join(lines)
def to_dict(obj):
    """Convert a gRPC message to its dictionary representation.

    Args:
        obj: The gRPC message to convert to dict.

    Returns:
        dict: A dictionary representation of the message, with camelCase
        keys rewritten to snake_case except for the special-cased fields.
    """
    raw = MessageToDict(obj, including_default_value_fields=True)
    result = {}
    for key, value in raw.items():
        # This check is a bit of a hack to ensure that the []byte encoded
        # in the V3WriteData's `data` field does not get serialized out
        # when MessageToDict is called.
        if isinstance(obj, V3WriteData) and key == 'data':
            result[key] = obj.data
        elif isinstance(obj, V3TransactionStatus) and key == 'context':
            result[key] = to_dict(obj.context)
        elif isinstance(obj, V3WritePayload) and key == 'data':
            result[key] = [to_dict(item) for item in obj.data]
        elif isinstance(obj, V3WriteTransaction) and key == 'context':
            result[key] = to_dict(obj.context)
        else:
            # Ordinary field: rewrite camelCase -> snake_case.
            snake_key = _re_all.sub(r'\1_\2', _re_first.sub(r'\1_\2', key)).lower()
            result[snake_key] = value
    return result
def pb_to_snapshot(pb_snapshot):
    """
    Converts a Snapshot from protobuf object to protocol object.

    :param pb_snapshot: The protobuf snapshot.
    :type pb_snapshot: :class:`sphere_pb2.Snapshot` object
    :return: The snapshot
    :rtype: :class:`sphere.protocol.Snapshot` object
    """
    fields = MessageToDict(pb_snapshot,
                           preserving_proto_field_name=True,
                           including_default_value_fields=True)
    # MessageToDict converts bytes to a base64 string; decode it back.
    # https://github.com/protocolbuffers/protobuf/issues/4525
    fields['color_image']['data'] = base64.b64decode(
        fields['color_image']['data'])
    for key, value in fields.items():
        if key == 'pose':
            # Flattened out of the loop below; the nested message is dropped.
            translation = list(value['translation'].values())
            rotation = list(value['rotation'].values())
        elif key in ('color_image', 'depth_image', 'feelings'):
            # Collapse nested-message dicts into plain lists of their values.
            fields[key] = list(value.values())
        elif key == 'datetime':
            # MessageToDict converts uint64 to string; cast it back.
            # https://github.com/protocolbuffers/protobuf/issues/1823
            fields[key] = int(value)
    # Replace the nested pose message with two top-level lists.
    del fields['pose']
    fields['translation'] = translation
    fields['rotation'] = rotation
    return Snapshot(**fields)
def create_pipeline_from_file(self, filePath):
    """
    Create Pipeline from pachyderm pipeline json config file.
    """
    rawConfig = self.__load_json_config(filePath)
    parsed = ParseDict(rawConfig, proto.PipelineInfo(),
                       ignore_unknown_fields=True)
    pipelineName = parsed.pipeline.name
    hasChanged = self._has_pipeline_config_changed(pipelineName)
    if hasChanged:
        # Show what will change before touching the pipeline.
        print(self._diff(pipelineName))
        print('%s Updating...' % (pipelineName))
    # Create when missing, or re-create (update) when the config changed.
    if (not self.is_resource_already_exist(pipelineName)) or hasChanged:
        fullConfig = MessageToDict(parsed, including_default_value_fields=False)
        # Keep only the keys python-pachyderm's create_pipeline accepts.
        filteredConfig = {
            convert(key): value
            for key, value in fullConfig.items()
            if convert(key) in FIELDS
        }
        normalized = map_nested_dicts_modify_key(filteredConfig, convert)
        # In-place: coerce numeric-looking strings back to numbers.
        map_nested_dicts_modify(normalized, force_number)
        self.pps.create_pipeline(pipelineName, **normalized, update=hasChanged)
def build_model_from_pb(name, pb_model):
    """Create a pydantic model named *name* whose fields/defaults mirror *pb_model*."""
    from google.protobuf.json_format import MessageToDict

    defaults = MessageToDict(pb_model(), including_default_value_fields=True)
    fields = {}
    for key, default in defaults.items():
        # 'chunks'/'matches' nest documents of the same model (recursive type).
        field_type = name if key in ('chunks', 'matches') else type(default)
        fields[key] = (field_type, Field(default=default))
    if pb_model == QueryLangProto:
        fields['parameters'] = (Dict, Field(default={}))
    return create_model(name, **fields)
def get_pipeline(self, pipeline):
    """Return the inspected pipeline as a normalized dict, or None on any failure."""
    try:
        inspected = self.pps.inspect_pipeline(pipeline)
        rawInfo = MessageToDict(inspected, including_default_value_fields=False)
        pipelineInfo = {}
        for key, value in rawInfo.items():
            # Drop fields that should not participate in config diffing.
            if convert(key) not in IGNORED_FROM_DIFF_FIELDS:
                pipelineInfo[convert(key)] = value
        # In-place: coerce numeric-looking strings back to numbers.
        map_nested_dicts_modify(pipelineInfo, force_number)
        return pipelineInfo
    except Exception:
        # Best-effort lookup: any failure is reported as "no such pipeline".
        return None
def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any:
    """
    Convert a Feast field value Proto into its native Python representation.

    Args:
        field_value_proto: Field value Proto

    Returns:
        Python native type representation/version of the given field_value_proto

    Raises:
        TypeError: If the proto carries a value type with no known conversion.
    """
    decoded = MessageToDict(field_value_proto, float_precision=18)  # type: ignore
    # When proto_json.patch() ran earlier (true for a feature server),
    # MessageToDict already yields a plain Python value rather than a dict.
    if not isinstance(decoded, dict):
        return decoded
    # The dict holds exactly one populated oneof field; dispatch on its name.
    for type_key, raw in decoded.items():
        # List-typed fields wrap their payload in a {"val": [...]} envelope.
        val = raw.get("val", []) if "List" in type_key else raw
        if type_key == "int64Val":
            # MessageToDict renders int64 as a string; cast it back.
            return int(val)
        if type_key == "bytesVal":
            return bytes(val)
        if type_key in ("int64ListVal", "int32ListVal"):
            return [int(item) for item in val]
        if type_key in ("floatListVal", "doubleListVal"):
            return [float(item) for item in val]
        if type_key == "stringListVal":
            return [str(item) for item in val]
        if type_key == "bytesListVal":
            return [bytes(item) for item in val]
        if type_key == "boolListVal":
            return [bool(item) for item in val]
        if type_key in ("int32Val", "floatVal", "doubleVal", "stringVal", "boolVal"):
            # Already the right Python type after JSON conversion.
            return val
        # Any other field name is an unsupported value type.
        raise TypeError(
            f"Casting to Python native type for type {type_key} failed. "
            f"Type {type_key} not found"
        )
def build_model_from_pb(name: str, pb_model: Callable):
    """
    Build model from protobuf message.

    :param name: Name of the model.
    :param pb_model: protobuf message.
    :return: Model.
    """
    from google.protobuf.json_format import MessageToDict

    defaults = MessageToDict(pb_model(), including_default_value_fields=True)
    # 'chunks'/'matches' nest documents of the same model (recursive type);
    # every other field is typed by its JSON-default value.
    fields = {
        key: (name if key in ('chunks', 'matches') else type(default),
              Field(default=default))
        for key, default in defaults.items()
    }
    if pb_model == QueryLangProto:
        fields['parameters'] = (Dict, Field(default={}))
    return create_model(name, **fields)