def run():
    """Announce the IPv4 route 192.168.10.0/24 to a local GoBGP daemon over gRPC."""
    channel = grpc.insecure_channel('192.168.10.1:50051')
    stub = gobgp_pb2_grpc.GobgpApiStub(channel)

    # NLRI: the prefix being advertised, wrapped in a protobuf Any.
    packed_nlri = Any()
    packed_nlri.Pack(
        attribute_pb2.IPAddressPrefix(
            prefix_len=24,
            prefix="192.168.10.0",
        ))

    # Path attributes: ORIGIN (2 = INCOMPLETE) followed by NEXT_HOP.
    packed_origin = Any()
    packed_origin.Pack(attribute_pb2.OriginAttribute(origin=2))
    packed_next_hop = Any()
    packed_next_hop.Pack(attribute_pb2.NextHopAttribute(next_hop="0.0.0.0"))

    stub.AddPath(
        gobgp_pb2.AddPathRequest(
            table_type=gobgp_pb2.GLOBAL,
            path=gobgp_pb2.Path(
                nlri=packed_nlri,
                pattrs=[packed_origin, packed_next_hop],
                family=gobgp_pb2.Family(
                    afi=gobgp_pb2.Family.AFI_IP,
                    safi=gobgp_pb2.Family.SAFI_UNICAST),
            )),
        _TIMEOUT_SECONDS,
    )
def get_packed_pattrs(self: 'Path'):
    """Build the list of Any-packed path attributes for this path.

    Order: optional actions attribute, MP_REACH_NLRI, ORIGIN, then any
    caller-supplied additional attributes.
    """
    packed = []

    # Actions attribute, only when actions are configured.
    if self.actions:
        action_attr = Any()
        action_attr.Pack(pack_actions(*self.actions))
        packed.append(action_attr)

    # MP_REACH_NLRI: substitute a placeholder next hop when none are given.
    hops = self.next_hops
    if not hops:
        hops = [self.next_hop_not_important()]
    reach_attr = Any()
    reach_attr.Pack(
        attribute_pb2.MpReachNLRIAttribute(
            family=self.get_family(),
            nlris=[
                pack_nlris(*self.nlris),
            ],
            next_hops=hops,
        ))
    packed.append(reach_attr)

    # ORIGIN attribute.
    origin_attr = Any()
    origin_attr.Pack(attribute_pb2.OriginAttribute(origin=self.origin))
    packed.append(origin_attr)

    # Extra attributes supplied by the caller go last.
    if self.additional_pattrs is not None:
        packed.extend(self.additional_pattrs)
    return packed
async def test_other_error_details_present():
    """is_reset_signal() must find the RESET ErrorInfo even when other status
    details precede it in the list."""
    retry_detail = Any()
    retry_detail.Pack(RetryInfo())
    reset_detail = Any()
    reset_detail.Pack(ErrorInfo(reason="RESET", domain="pubsublite.googleapis.com"))
    status_pb = Status(code=10, details=[retry_detail, reset_detail])
    assert is_reset_signal(Aborted("", response=make_call(status_pb)))
def test_list_database_operations_defaults(self):
    """List database operations with default arguments and verify the RPC
    request, headers, and that results are wrapped as Operations."""
    from google.api_core.operation import Operation
    from google.cloud.spanner_admin_database_v1 import CreateDatabaseMetadata
    from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient
    from google.cloud.spanner_admin_database_v1 import ListDatabaseOperationsRequest
    from google.cloud.spanner_admin_database_v1 import (
        ListDatabaseOperationsResponse,
    )
    from google.cloud.spanner_admin_database_v1 import (
        OptimizeRestoredDatabaseMetadata,
    )
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any

    api = DatabaseAdminClient(credentials=mock.Mock())
    client = _Client(self.PROJECT)
    client.database_admin_api = api
    instance = self._make_one(self.INSTANCE_ID, client)

    # Two operations carrying different metadata types, each packed into Any.
    create_database_metadata = Any()
    create_database_metadata.Pack(
        CreateDatabaseMetadata.pb(CreateDatabaseMetadata(database="database"))
    )
    optimize_database_metadata = Any()
    optimize_database_metadata.Pack(
        OptimizeRestoredDatabaseMetadata.pb(
            OptimizeRestoredDatabaseMetadata(name="database")
        )
    )
    databases_pb = ListDatabaseOperationsResponse(
        operations=[
            operations_pb2.Operation(name="op1", metadata=create_database_metadata),
            operations_pb2.Operation(
                name="op2", metadata=optimize_database_metadata
            ),
        ]
    )
    # Replace the wrapped transport method with a mock returning the canned
    # response so no network call is made.
    ldo_api = api._transport._wrapped_methods[
        api._transport.list_database_operations
    ] = mock.Mock(return_value=databases_pb)

    ops = instance.list_database_operations()

    expected_metadata = (
        ("google-cloud-resource-prefix", instance.name),
        ("x-goog-request-params", "parent={}".format(instance.name)),
    )
    ldo_api.assert_called_once_with(
        ListDatabaseOperationsRequest(parent=self.INSTANCE_NAME),
        metadata=expected_metadata,
        retry=mock.ANY,
        timeout=mock.ANY,
    )
    # Each returned item should be a long-running Operation wrapper.
    self.assertTrue(all([type(op) == Operation for op in ops]))
def test_list_database_operations_w_options(self):
    """List database operations with explicit filter/page_size and verify the
    RPC request, headers, and that results are wrapped as Operations."""
    from google.api_core.operation import Operation
    from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient
    from google.cloud.spanner_admin_database_v1 import ListDatabaseOperationsRequest
    from google.cloud.spanner_admin_database_v1 import (
        ListDatabaseOperationsResponse,
    )
    from google.cloud.spanner_admin_database_v1 import RestoreDatabaseMetadata
    from google.cloud.spanner_admin_database_v1 import RestoreSourceType
    from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlMetadata
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any

    api = DatabaseAdminClient(credentials=mock.Mock())
    client = _Client(self.PROJECT)
    client.database_admin_api = api
    instance = self._make_one(self.INSTANCE_ID, client)

    # Two operations carrying different metadata types, each packed into Any.
    restore_database_metadata = Any()
    restore_database_metadata.Pack(
        RestoreDatabaseMetadata.pb(
            RestoreDatabaseMetadata(name="database",
                                    source_type=RestoreSourceType.BACKUP)))
    update_database_metadata = Any()
    update_database_metadata.Pack(
        UpdateDatabaseDdlMetadata.pb(
            UpdateDatabaseDdlMetadata(database="database",
                                      statements=["statements"])))
    databases_pb = ListDatabaseOperationsResponse(operations=[
        operations_pb2.Operation(name="op1", metadata=restore_database_metadata),
        operations_pb2.Operation(name="op2", metadata=update_database_metadata),
    ])
    # Replace the wrapped transport method with a mock returning the canned
    # response so no network call is made.
    ldo_api = api._transport._wrapped_methods[
        api._transport.list_database_operations] = mock.Mock(
            return_value=databases_pb)

    ops = instance.list_database_operations(filter_="filter", page_size=10)

    expected_metadata = (
        ("google-cloud-resource-prefix", instance.name),
        ("x-goog-request-params", "parent={}".format(instance.name)),
    )
    ldo_api.assert_called_once_with(
        ListDatabaseOperationsRequest(parent=self.INSTANCE_NAME,
                                      filter="filter",
                                      page_size=10),
        metadata=expected_metadata,
        retry=mock.ANY,
        timeout=mock.ANY,
    )
    # Each returned item should be a long-running Operation wrapper.
    self.assertTrue(all([type(op) == Operation for op in ops]))
def test_list_database_operations_w_options(self):
    """List database operations with filter/page_size (legacy gapic client)
    and verify the RPC request and result wrapping."""
    from google.api_core.operation import Operation
    from google.cloud.spanner_admin_database_v1.gapic import database_admin_client
    from google.cloud.spanner_admin_database_v1.proto import (
        spanner_database_admin_pb2,
    )
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any

    api = database_admin_client.DatabaseAdminClient(mock.Mock())
    client = _Client(self.PROJECT)
    client.database_admin_api = api
    instance = self._make_one(self.INSTANCE_ID, client)

    # Two operations carrying different metadata types, each packed into Any.
    restore_database_metadata = Any()
    restore_database_metadata.Pack(
        spanner_database_admin_pb2.RestoreDatabaseMetadata()
    )
    update_database_metadata = Any()
    update_database_metadata.Pack(
        spanner_database_admin_pb2.UpdateDatabaseDdlMetadata()
    )
    databases_pb = spanner_database_admin_pb2.ListDatabaseOperationsResponse(
        operations=[
            operations_pb2.Operation(
                name="op1", metadata=restore_database_metadata
            ),
            operations_pb2.Operation(name="op2", metadata=update_database_metadata),
        ]
    )
    # Patch the inner API call so no network request is made.
    ldo_api = api._inner_api_calls["list_database_operations"] = mock.Mock(
        return_value=databases_pb
    )

    operations = instance.list_database_operations(filter_="filter", page_size=10)

    # Each returned item should be a long-running Operation wrapper.
    for op in operations:
        self.assertIsInstance(op, Operation)
    expected_metadata = [
        ("google-cloud-resource-prefix", instance.name),
        ("x-goog-request-params", "parent={}".format(instance.name)),
    ]
    ldo_api.assert_called_once_with(
        spanner_database_admin_pb2.ListDatabaseOperationsRequest(
            parent=self.INSTANCE_NAME, filter="filter", page_size=10
        ),
        metadata=expected_metadata,
        retry=mock.ANY,
        timeout=mock.ANY,
    )
def OnInvoke(self, request, context):
    """Handle a service invocation.

    Returns an Any whose value is a UTF-8 status string:
    'INVOKE_RECEIVED' for 'my_method', 'METHOD_NOT_SUPPORTED' otherwise.
    """
    if request.method == 'my_method':
        response = Any(value='INVOKE_RECEIVED'.encode('utf-8'))
    else:
        response = Any(value='METHOD_NOT_SUPPORTED'.encode('utf-8'))
    # Return response to caller. (The original had a second, unreachable
    # `return response` after this one — dead code, removed.)
    return response
def OnInvoke(self, request, context):
    """Handle a service invocation, replying with a plain-text payload."""
    # Both outcomes are UTF-8 plain text; only the payload differs.
    content_type = "text/plain; charset=UTF-8"
    if request.method == 'my-method':
        data = Any(value='INVOKE_RECEIVED'.encode('utf-8'))
    else:
        data = Any(value='unsupported methods'.encode('utf-8'))
    # Return response to caller
    return common_v1.InvokeResponse(data=data, content_type=content_type)
def OnInvoke(self, request, context):
    """Handle invocation of 'my_method' with a packed CustomResponse.

    On success the content type is left empty because the Any payload carries
    its own type_url; on failure a plain-text error payload is returned.
    """
    content_type = ""
    if request.method == 'my_method':
        success = response_messages.CustomResponse(
            isSuccess=True, code=200, message="Hello World - Success!")
        data = Any()
        data.Pack(success)
    else:
        data = Any(value='METHOD_NOT_SUPPORTED'.encode('utf-8'))
        content_type = "text/plain"
    # Debug output of the outgoing payload.
    print(data, flush=True)
    print(content_type, flush=True)
    return common_v1.InvokeResponse(data=data, content_type=content_type)
def OnInvoke(self, request, context):
    """Return a packed CustomResponse for 'my_method', else an error Any."""
    if request.method != 'my_method':
        return Any(value='METHOD_NOT_SUPPORTED'.encode('utf-8'))
    packed = Any()
    packed.Pack(
        response_messages.CustomResponse(
            isSuccess=True,
            code=200,
            message="Hello World - Success!"))
    return packed
def wrap(request_id, outgoing):
    """Wrap an outgoing protobuf message in a Wrapper envelope.

    Args:
        request_id: correlation id copied onto the envelope.
        outgoing: protobuf message to pack into the envelope's Any field.

    Returns:
        The populated Wrapper.
    """
    wrapped = Wrapper()
    wrapped.request_id = request_id
    # Pack directly into the embedded Any field instead of building a
    # temporary Any and CopyFrom-ing it — one fewer message copy, same result.
    wrapped.message.Pack(outgoing)
    return wrapped
def to_typed_value_any_state(proto_msg):
    """Wrap proto_msg in a google.protobuf.Any and expose it as a TypedValue.

    The TypedValue's typename is fixed to the Any wrapper type; its value is
    the serialized Any, which itself carries proto_msg's own type_url.
    """
    # Renamed from `any`, which shadowed the builtin.
    packed = Any()
    packed.Pack(proto_msg)
    typed_value = TypedValue()
    typed_value.typename = "type.googleapis.com/google.protobuf.Any"
    typed_value.value = packed.SerializeToString()
    return typed_value
def to_typed_value(proto_msg):
    """Convert proto_msg into a TypedValue carrying its type_url and payload.

    Unlike to_typed_value_any_state, the Any wrapper is unwrapped here: the
    TypedValue holds proto_msg's own type_url and serialized bytes.
    """
    # Renamed from `any`, which shadowed the builtin.
    packed = Any()
    packed.Pack(proto_msg)
    typed_value = TypedValue()
    typed_value.typename = packed.type_url
    typed_value.value = packed.value
    return typed_value
def _pack_any(value) -> Any:
    """Return value as an Any, packing it first unless it already is one."""
    if isinstance(value, Any):
        return value
    wrapped = Any()
    wrapped.Pack(value)
    return wrapped
def get_files(self):
    '''
    Return (non-template) files in this chart
    '''
    # TODO(yanivoliver): add support for .helmignore
    # TODO(yanivoliver): refactor seriously to be similar to what Helm does
    # (https://github.com/helm/helm/blob/master/pkg/chartutil/load.go)
    chart_files = []
    for root, _, files in os.walk(self.source_directory):
        # Subcharts and templates are handled elsewhere.
        if root.endswith("charts") or root.endswith("templates"):
            continue
        for name in files:  # renamed from `file`, which shadowed a builtin
            if name in (".helmignore", "Chart.yaml", "values.toml", "values.yaml"):
                continue
            filename = os.path.relpath(os.path.join(root, name),
                                       self.source_directory)
            # TODO(yanivoliver): Find a better solution.
            # We need this in order to support charts on Windows - Tiller will look
            # for the files it uses using the relative path, using Linuxish
            # path seperators (/). Thus, sending the file list to Tiller
            # from a Windows machine the lookup will fail.
            filename = filename.replace("\\", "/")
            # Read as bytes: Any.value is a bytes field, and chart files may
            # not be valid text (text-mode read would pass str and fail on
            # Python 3).
            with open(os.path.join(root, name), "rb") as fd:
                chart_files.append(Any(type_url=filename, value=fd.read()))
    return chart_files
def _instance_api_response_for_create():
    """Build a mocked instance-admin API whose create_instance returns a
    long-running operation carrying CreateInstanceMetadata."""
    import datetime
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.types import (
        bigtable_instance_admin as messages_v2_pb2,
    )
    from google.cloud.bigtable_admin_v2.types import instance

    NOW = datetime.datetime.utcnow()
    NOW_PB = _datetime_to_pb_timestamp(NOW)
    metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
    # Build the Any payload manually: type_url + serialized metadata bytes.
    type_url = "type.googleapis.com/{}".format(
        messages_v2_pb2.CreateInstanceMetadata._meta._pb.DESCRIPTOR.full_name)
    response_pb = operations_pb2.Operation(
        name=OP_NAME,
        metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()),
    )
    response = operation.from_gapic(
        response_pb,
        mock.Mock(),
        instance.Instance,
        metadata_type=messages_v2_pb2.CreateInstanceMetadata,
    )
    project_path_template = "projects/{}"
    location_path_template = "projects/{}/locations/{}"
    api = _make_instance_admin_api()
    api.create_instance.return_value = response
    # Path helpers are plain str.format bindings over the templates above.
    api.project_path = project_path_template.format
    api.location_path = location_path_template.format
    api.common_location_path = location_path_template.format
    return api, response
def _readFlowMeasurement(self, module, clear, quantiles):
    """Issue a 'read' command to a FlowMeasure BESS module and return stats.

    Args:
        module: name of the BESS module to query.
        clear: whether the module should clear its counters after the read.
        quantiles: percentiles requested for both latency and jitter.

    Returns:
        The 'statistics' entry of the decoded response when present,
        otherwise the whole decoded response dict.
    """
    # create request for flow measurements and send to bess
    request = module_msg.FlowMeasureCommandReadArg(
        clear=clear,
        latency_percentiles=quantiles,
        jitter_percentiles=quantiles,
    )
    # Renamed from `any`, which shadowed the builtin.
    packed_arg = Any()
    packed_arg.Pack(request)
    response = self.bess_client.ModuleCommand(
        bess_msg.CommandRequest(
            name=module,
            cmd="read",
            arg=packed_arg,
        ),
        timeout=5,
    )
    # unpack response and return results
    data = response.data
    msg = module_msg.FlowMeasureReadResponse()
    if data.Is(module_msg.FlowMeasureReadResponse.DESCRIPTOR):
        data.Unpack(msg)
    msg = MessageToDict(msg)
    if "statistics" in msg:
        return msg["statistics"]
    return msg
def test_from_pb_w_metadata_and_kwargs(self):
    """from_pb should decode Any metadata via _TYPE_URL_MAP and keep extra
    keyword arguments as .metadata."""
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Struct, Value
    from google.cloud import operation as MUT
    from unit_tests._testing import _Monkey

    TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,)
    type_url_map = {TYPE_URI: Struct}
    client = _Client()
    meta = Struct(fields={'foo': Value(string_value=u'Bar')})
    # Build the Any by hand: type_url + serialized Struct bytes.
    metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(
        name=self.OPERATION_NAME, metadata=metadata_pb)
    klass = self._getTargetClass()
    # Patch the module-level type map so the Any payload decodes to Struct.
    with _Monkey(MUT, _TYPE_URL_MAP=type_url_map):
        operation = klass.from_pb(operation_pb, client, baz='qux')
    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertTrue(operation.client is client)
    pb_metadata = operation.pb_metadata
    self.assertTrue(isinstance(pb_metadata, Struct))
    self.assertEqual(list(pb_metadata.fields), ['foo'])
    self.assertEqual(pb_metadata.fields['foo'].string_value, 'Bar')
    # Extra kwargs are stored verbatim on .metadata.
    self.assertEqual(operation.metadata, {'baz': 'qux'})
def get_files(self):
    '''
    Return (non-template) files in this chart
    '''
    # TODO(yanivoliver): add support for .helmignore
    # TODO(yanivoliver): refactor seriously to be similar to what Helm does
    # (https://github.com/helm/helm/blob/master/pkg/chartutil/load.go)
    # NOTE(review): despite the docstring, this walks the templates/
    # directory — confirm whether the docstring or the path is wrong.
    chart_files = []
    template_dir = pathlib.Path(self.source_directory, 'templates')
    if not template_dir.exists():
        # Logger.warn is deprecated; warning() is the supported spelling.
        self._logger.warning(
            "Chart %s has no templates directory, no templates will be deployed",
            self.chart.name)
    for f in template_dir.glob('**/*'):
        if not f.is_file():
            continue
        if f.name in (".helmignore", "Chart.yaml", "values.toml", "values.yaml"):
            continue
        chart_files.append(
            Any(
                # Tiller expects POSIX-style relative paths.
                type_url=f.relative_to(self.source_directory).as_posix(),
                value=f.read_bytes(),
            ))
    return chart_files
def encode_to_any(type_url, data):
    '''
    Encode the provided data into :obj:`protobuf.Any`.

    This function encodes strings with UTF-8, integers with the helper
    function `int_to_bytes`, bytes unchanged, and proto messages with their
    `SerializeToString()` method.

    Args:
        type_url(string): the type_url to encode the data with. This will be
            the `type_url` field of the final result.
        data(str, int, bytes or proto message): the data to encode. This will
            be the `value` field of the final result.

    Returns:
        :obj:`Any`

    Examples:
        >>> res = encode_to_any('test_string', 'test')
        >>> res.type_url
        'test_string'
        >>> res.value
        b'test'
    '''
    if isinstance(data, str):
        value = data.encode()
    elif isinstance(data, int):
        value = conversion.int_to_bytes(data)
    elif isinstance(data, bytes):
        value = data
    else:
        # Assume anything else is a proto message.
        value = data.SerializeToString()
    return Any(type_url=type_url, value=value)
def get_files(self):
    '''
    Return (non-template) files in this chart
    '''
    # TODO(yanivoliver): refactor seriously to be similar to what Helm does
    # (https://github.com/helm/helm/blob/master/pkg/chartutil/load.go)
    chart_files = []
    for root, _, files in os.walk(self.source_directory):
        if ChartBuilder.is_ignorable(root):
            continue
        # Drop .helmignore matches, then prune reserved chart files.
        ignore_patterns = ChartBuilder.get_helmignore(root=root)
        candidates = ChartBuilder.remove_helmignored_files(
            files=[os.path.join(root, name) for name in files],
            helmignore_list=ignore_patterns)
        candidates = ChartBuilder.remove_necessary_files(yaml_files=candidates)
        for path in candidates:
            rel_path = os.path.relpath(path, self.source_directory)
            # TODO(yanivoliver): Find a better solution.
            # We need this in order to support charts on Windows - Tiller will look
            # for the files it uses using the relative path, using Linuxish
            # path seperators (/). Thus, sending the file list to Tiller
            # from a Windows machine the lookup will fail.
            rel_path = rel_path.replace("\\", "/")
            chart_files.append(
                Any(type_url=rel_path, value=ChartBuilder.read_file(path)))
    return chart_files
def training_started(self, behavior_name: str, config: TrainerSettings) -> None:
    """Queue a TrainingBehaviorInitialized telemetry message describing the
    trainer configuration for the given behavior."""
    rewards = config.reward_signals
    network = config.network_settings
    init_msg = TrainingBehaviorInitialized(
        behavior_name=behavior_name,
        trainer_type=config.trainer_type.value,
        extrinsic_reward_enabled=(RewardSignalType.EXTRINSIC in rewards),
        gail_reward_enabled=(RewardSignalType.GAIL in rewards),
        curiosity_reward_enabled=(RewardSignalType.CURIOSITY in rewards),
        rnd_reward_enabled=(RewardSignalType.RND in rewards),
        behavioral_cloning_enabled=config.behavioral_cloning is not None,
        recurrent_enabled=network.memory is not None,
        visual_encoder=network.vis_encode_type.value,
        num_network_layers=network.num_layers,
        num_network_hidden_units=network.hidden_units,
        trainer_threaded=config.threaded,
        self_play_enabled=config.self_play is not None,
        curriculum_enabled=self._behavior_uses_curriculum(behavior_name),
    )
    # Pack into Any so the serialized payload carries its type information.
    packed = Any()
    packed.Pack(init_msg)
    outgoing = OutgoingMessage()
    outgoing.set_raw_bytes(packed.SerializeToString())
    super().queue_message_to_send(outgoing)
def addFAR(self, far, debug=False):
    """Install a FAR (Forwarding Action Rule) into the UPF's farLookup module.

    Args:
        far: rule object providing the match keys (farID, fseID) and the
            action/tunnel parameters written as values.
        debug: when True, print the BESS response.
    """
    # set action value for far action
    action = self._setActionValue(far)
    # parse fields of far into ExactMatchCommandAddArg
    arg = module_msg.ExactMatchCommandAddArg(
        gate=far.tunnelType,
        fields=[
            util_msg.FieldData(value_int=far.farID),
            util_msg.FieldData(value_int=far.fseID),
        ],
        values=[
            util_msg.FieldData(value_int=action),
            util_msg.FieldData(value_int=far.tunnelType),
            util_msg.FieldData(value_int=far.tunnelIP4Src),
            util_msg.FieldData(value_int=far.tunnelIP4Dst),
            util_msg.FieldData(value_int=far.tunnelTEID),
            util_msg.FieldData(value_int=far.tunnelPort),
        ],
    )
    # store into Any() message protobuf type
    # (renamed from `any`, which shadowed the builtin)
    packed_arg = Any()
    packed_arg.Pack(arg)
    # send request to UPF to add rule
    response = self.bess_client.ModuleCommand(
        bess_msg.CommandRequest(name="farLookup", cmd="add", arg=packed_arg),
        timeout=5,
    )
    if debug:
        print(response)
    self.fars.append(far)
def OnInvoke(self, request, context):
    """Log the incoming invocation and reply with a plain-text status payload."""
    logger.info("================== REQUEST ==================")
    logger.info(f"Content Type: {request.content_type}")
    logger.info(f"Message: {request.data.value}")

    # Both outcomes are UTF-8 plain text; only the payload differs.
    content_type = "text/plain; charset=UTF-8"
    if request.method == 'my_method':
        data = Any(value='SMSG_INVOKE_REQUEST'.encode('utf-8'))
    else:
        data = Any(value='METHOD_NOT_SUPPORTED'.encode('utf-8'))
    return commonv1pb.InvokeResponse(data=data, content_type=content_type)
def structure_iris_response(score):
    """Pack the first row of a 3-class iris score matrix into an Any-wrapped
    IrisPredictResponse."""
    row = score[0]
    prediction = IrisPredictResponse(
        setosa=row[0],
        versicolor=row[1],
        virginica=row[2])
    packed = Any()
    packed.Pack(prediction)
    return packed
def test_from_pb_w_metadata_and_kwargs(self):
    """from_pb should decode Any metadata via _TYPE_URL_MAP and keep extra
    keyword arguments as .caller_metadata."""
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud import operation as MUT
    from google.cloud._testing import _Monkey

    type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name, )
    type_url_map = {type_url: Struct}
    client = _Client()
    meta = Struct(fields={'foo': Value(string_value=u'Bar')})
    # Build the Any by hand: type_url + serialized Struct bytes.
    metadata_pb = Any(type_url=type_url, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(name=self.OPERATION_NAME,
                                            metadata=metadata_pb)
    klass = self._get_target_class()
    # Patch the module-level type map so the Any payload decodes to Struct.
    with _Monkey(MUT, _TYPE_URL_MAP=type_url_map):
        operation = klass.from_pb(operation_pb, client, baz='qux')
    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertIs(operation.client, client)
    # Decoded metadata equals the original Struct; kwargs land on
    # caller_metadata.
    self.assertEqual(operation.metadata, meta)
    self.assertEqual(operation.caller_metadata, {'baz': 'qux'})
def _instance_api_response_for_update(self):
    """Build a mocked instance-admin API whose partial_update_instance returns
    a long-running operation carrying UpdateInstanceMetadata."""
    import datetime
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2,
    )
    from google.cloud.bigtable_admin_v2.types import instance_pb2

    NOW = datetime.datetime.utcnow()
    NOW_PB = _datetime_to_pb_timestamp(NOW)
    metadata = messages_v2_pb2.UpdateInstanceMetadata(request_time=NOW_PB)
    # Build the Any payload manually: type_url + serialized metadata bytes.
    type_url = "type.googleapis.com/{}".format(
        messages_v2_pb2.UpdateInstanceMetadata.DESCRIPTOR.full_name)
    response_pb = operations_pb2.Operation(
        name=self.OP_NAME,
        metadata=Any(type_url=type_url, value=metadata.SerializeToString()),
    )
    response = operation.from_gapic(
        response_pb,
        mock.Mock(),
        instance_pb2.Instance,
        metadata_type=messages_v2_pb2.UpdateInstanceMetadata,
    )
    instance_path_template = "projects/{project}/instances/{instance}"
    instance_api = mock.create_autospec(
        bigtable_instance_admin_client.BigtableInstanceAdminClient)
    instance_api.partial_update_instance.return_value = response
    # Path helper is a plain str.format binding over the template above.
    instance_api.instance_path = instance_path_template.format
    return instance_api, response
def build_aspath(path):
    """Parse a space-separated AS-path string into a packed AsPathAttribute.

    Hops written with braces ("{65001,65002}") are collected as AS_SET
    members (segment type 1, per RFC 4271); plain hops are AS_SEQUENCE
    members (type 2). Consecutive hops of the same kind are folded into a
    single segment.
    """
    # XXX: super quickly written AS path parser...
    hops = path.split(" ")
    segs = []
    cur_type = 2   # type of the segment being accumulated
    cur_nbrs = []  # AS numbers (as strings) collected for that segment

    def reset(ntype):
        # Flush the accumulated numbers (if any) as one segment, then start
        # accumulating a segment of type `ntype`.
        nonlocal segs
        nonlocal cur_type
        nonlocal cur_nbrs
        seg = attribute_pb2.AsSegment(numbers=[int(x) for x in cur_nbrs])
        seg.type = cur_type
        if len(cur_nbrs) > 0:
            segs.append(seg)
        cur_type = ntype
        cur_nbrs = []

    for hop in hops:
        if "{" in hop:
            # AS_SET hop: flush the previous segment if it was a sequence.
            if cur_type != 1:
                reset(1)
            cur_nbrs += hop.replace("{", "").replace("}", "").split(",")
        else:
            # AS_SEQUENCE hop: flush the previous segment if it was a set.
            if cur_type != 2:
                reset(2)
            cur_nbrs += [hop]
    # Final flush; the placeholder type 0 is never emitted as a segment.
    reset(0)
    as_path = Any()
    as_path.Pack(attribute_pb2.AsPathAttribute(segments=segs, ))
    return as_path
def test_decode_time(self):
    """A Timestamp packed into Any should decode back to its datetime."""
    now = Timestamp()
    now.GetCurrentTime()
    packed = Any()
    packed.Pack(now)
    assert now.ToDatetime() == Client._decode_any(packed)
def test_update_app_profile_routing_single(self):
    """update() with ANY routing should send multi_cluster_routing_use_any in
    the update mask and surface the operation's UpdateAppProfileMetadata."""
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud.bigtable_admin_v2.types import (
        bigtable_instance_admin as messages_v2_pb2,
    )
    from google.cloud.bigtable.enums import RoutingPolicyType
    from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
        BigtableInstanceAdminClient,
    )
    from google.protobuf import field_mask_pb2

    credentials = _make_credentials()
    client = self._make_client(project=self.PROJECT,
                               credentials=credentials,
                               admin=True)
    instance = client.instance(self.INSTANCE_ID)
    routing = RoutingPolicyType.ANY
    app_profile = self._make_one(self.APP_PROFILE_ID,
                                 instance,
                                 routing_policy_type=routing)
    # Create response_pb: an operation whose metadata is an Any built by hand
    # (type_url + serialized UpdateAppProfileMetadata bytes).
    metadata = messages_v2_pb2.UpdateAppProfileMetadata()
    type_url = "type.googleapis.com/{}".format(
        messages_v2_pb2.UpdateAppProfileMetadata._meta._pb.DESCRIPTOR.
        full_name)
    response_pb = operations_pb2.Operation(
        name=self.OP_NAME,
        metadata=Any(type_url=type_url,
                     value=metadata._pb.SerializeToString()),
    )
    # Patch the stub used by the API method.
    instance_api = mock.create_autospec(BigtableInstanceAdminClient)
    # Mock api calls
    instance_api.app_profile_path.return_value = (
        "projects/project/instances/instance-id/appProfiles/app-profile-id"
    )
    client._instance_admin_client = instance_api
    client._instance_admin_client.update_app_profile.return_value = response_pb
    # Perform the method and check the result.
    ignore_warnings = True
    expected_request_update_mask = field_mask_pb2.FieldMask(
        paths=["multi_cluster_routing_use_any"])
    expected_request = {
        "request": {
            "app_profile": app_profile._to_pb(),
            "update_mask": expected_request_update_mask,
            "ignore_warnings": ignore_warnings,
        }
    }
    result = app_profile.update(ignore_warnings=ignore_warnings)
    # The RPC must have been invoked with exactly the expected kwargs.
    actual_request = client._instance_admin_client.update_app_profile.call_args_list[
        0].kwargs
    self.assertEqual(actual_request, expected_request)
    self.assertEqual(
        result.metadata.type_url,
        "type.googleapis.com/google.bigtable.admin.v2.UpdateAppProfileMetadata",
    )