def test_explain_flattened():
    """Verify explain() forwards flattened keyword arguments into the request."""
    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
    # Patch the gRPC stub so no real RPC is issued.
    with mock.patch.object(type(client.transport.explain), "__call__") as stub_call:
        stub_call.return_value = prediction_service.ExplainResponse()
        # Invoke using keyword arguments, one truthy value per flattened field.
        client.explain(
            endpoint="endpoint_value",
            instances=[struct.Value(null_value=struct.NullValue.NULL_VALUE)],
            parameters=struct.Value(null_value=struct.NullValue.NULL_VALUE),
            deployed_model_id="deployed_model_id_value",
        )
        # Exactly one underlying call, carrying the expected request values.
        assert len(stub_call.mock_calls) == 1
        _, args, _ = stub_call.mock_calls[0]
        assert args[0].endpoint == "endpoint_value"
        assert args[0].instances == [
            struct.Value(null_value=struct.NullValue.NULL_VALUE)
        ]
        # https://github.com/googleapis/gapic-generator-python/issues/414
        # assert args[0].parameters == struct.Value(
        #     null_value=struct.NullValue.NULL_VALUE
        # )
        assert args[0].deployed_model_id == "deployed_model_id_value"
def test_unpack_scalar_proto(self):
    """A scalar proto survives a pack/unpack round trip intact."""
    original = struct_pb2.Value(string_value='my message')
    packed = tensor_utils.pack_tensor(original)
    round_trip = struct_pb2.Value()
    # unpack_tensor yields an Any; Unpack restores the concrete message.
    tensor_utils.unpack_tensor(packed).Unpack(round_trip)
    self.assertEqual(original, round_trip)
def test_predict_flattened():
    """Verify predict() forwards flattened keyword arguments into the request."""
    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
    # Patch the gRPC stub so no real RPC is issued.
    with mock.patch.object(type(client.transport.predict), "__call__") as stub_call:
        stub_call.return_value = prediction_service.PredictResponse()
        # Invoke using keyword arguments, one truthy value per flattened field.
        client.predict(
            endpoint="endpoint_value",
            instances=[struct.Value(null_value=struct.NullValue.NULL_VALUE)],
            parameters=struct.Value(null_value=struct.NullValue.NULL_VALUE),
        )
        # Exactly one underlying call, carrying the expected request values.
        assert len(stub_call.mock_calls) == 1
        _, args, _ = stub_call.mock_calls[0]
        assert args[0].endpoint == "endpoint_value"
        assert args[0].instances == [
            struct.Value(null_value=struct.NullValue.NULL_VALUE)
        ]
def buildSourceProperties(self, control):
    """Build the source_properties mapping for one compliance control.

    Args:
        control: dict expected to carry 'control_results', 'control_id',
            'profile' and 'profile_summary' keys (missing keys yield None).

    Returns:
        Dict mapping property names to struct_pb2.Value string wrappers.
    """
    # Serialize results deterministically; swap double quotes for single
    # quotes so the JSON nests cleanly inside the property value.
    results = json.dumps(control.get('control_results'), sort_keys=True,
                         indent=4).replace('"', "'")
    # NOTE: the original wrapped `results` in ''.join(...), a no-op on a str;
    # removed.
    return {
        'control_id': struct_pb2.Value(string_value=control.get('control_id')),
        'results': struct_pb2.Value(string_value=results),
        'profile': struct_pb2.Value(string_value=control.get('profile')),
        'summary': struct_pb2.Value(string_value=control.get('profile_summary')),
    }
def _test_bot_events_simple(self, request):
    """Posts `request` to the BotAPI.Events pRPC endpoint and asserts the
    response holds exactly the BOT_NEW_SESSION event created by the handshake.
    """
    # Perform a bot handshake so one bot event exists, then switch back to a
    # user identity for the API call itself.
    self.set_as_bot()
    self.do_handshake()
    self.set_as_user()
    raw_resp = self.app.post(
        '/prpc/swarming.v1.BotAPI/Events', _encode(request), self._headers)
    # Expected event mirrors the state reported during the handshake above.
    expected = swarming_pb2.BotEventsResponse(
        events=[
            swarming_pb2.BotEvent(
                event_time=timestamp_pb2.Timestamp(seconds=1262401445),
                bot=swarming_pb2.Bot(
                    bot_id='bot1',
                    pools=['default'],
                    info=swarming_pb2.BotInfo(
                        supplemental=struct_pb2.Struct(
                            fields={
                                'running_time': struct_pb2.Value(number_value=1234.0),
                                'sleep_streak': struct_pb2.Value(number_value=0),
                                'started_ts': struct_pb2.Value(number_value=1410990411.11),
                            }),
                        external_ip='192.168.2.2',
                        authenticated_as='bot:whitelisted-ip',
                        version='123',
                    ),
                    dimensions=[
                        swarming_pb2.StringListPair(key='id', values=['bot1']),
                        swarming_pb2.StringListPair(key='os', values=['Amiga']),
                        swarming_pb2.StringListPair(key='pool', values=['default']),
                    ]),
                event=swarming_pb2.BOT_NEW_SESSION,
            ),
        ])
    resp = swarming_pb2.BotEventsResponse()
    _decode(raw_resp.body, resp)
    # Compare text renderings for a readable diff on failure (this is a
    # Python 2 file: `unicode` is the py2 builtin).
    self.assertEqual(unicode(expected), unicode(resp))
async def test_predict_flattened_async():
    """Verify the async predict() forwards flattened kwargs into the request."""
    client = PredictionServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.predict),
            '__call__') as call:
        # Designate an appropriate return value for the call.  (The bare
        # `prediction_service.PredictResponse()` assignment that previously
        # preceded this line was dead code — immediately overwritten — and
        # has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.predict(
            endpoint='endpoint_value',
            instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)],
            parameters=struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].endpoint == 'endpoint_value'
        assert args[0].instances == [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)]
        assert args[0].parameters == struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)
def _to_protobuf_value(value: type_utils.PARAMETER_TYPES) -> struct_pb2.Value:
    """Creates a google.protobuf.struct_pb2.Value message out of a provide value.

    Args:
      value: The value to be converted to Value message.

    Returns:
      A google.protobuf.struct_pb2.Value message.

    Raises:
      ValueError if the given value is not one of the parameter types.
    """
    # BUG FIX: bool must be tested BEFORE (int, float).  bool is a subclass
    # of int, so the numeric branch used to capture True/False and emit
    # number_value=1.0/0.0 instead of bool_value.
    if isinstance(value, bool):
        return struct_pb2.Value(bool_value=value)
    elif isinstance(value, str):
        return struct_pb2.Value(string_value=value)
    elif isinstance(value, (int, float)):
        # struct_pb2.Value stores all numbers as doubles.
        return struct_pb2.Value(number_value=value)
    elif isinstance(value, dict):
        # Recurse into mappings; keys stay as-is, values are converted.
        return struct_pb2.Value(struct_value=struct_pb2.Struct(
            fields={k: _to_protobuf_value(v) for k, v in value.items()}))
    elif isinstance(value, list):
        # Recurse into sequences element by element.
        return struct_pb2.Value(list_value=struct_pb2.ListValue(
            values=[_to_protobuf_value(v) for v in value]))
    else:
        raise ValueError('Value must be one of the following types: '
                         'str, int, float, bool, dict, and list. Got: '
                         f'"{value}" of type "{type(value)}".')
def test_pack_scalar_protos(self):
    """A scalar proto packs into a rank-0 tensor holding one Any entry."""
    original = struct_pb2.Value(string_value='my message')
    packed = tensor_utils.pack_tensor(original)
    # Scalars carry an empty shape and exactly one packed proto.
    self.assertEqual([], packed.shape)
    self.assertLen(packed.protos.array, 1)
    restored = struct_pb2.Value()
    self.assertTrue(packed.protos.array[0].Unpack(restored))
    self.assertEqual(original, restored)
def test_list_value_pb():
    """A ListValue-typed proto-plus field compares equal to a plain list."""
    class Foo(proto.Message):
        value = proto.Field(struct_pb2.ListValue, number=1)

    entries = [
        struct_pb2.Value(string_value="foo"),
        struct_pb2.Value(string_value="bar"),
        struct_pb2.Value(bool_value=True),
    ]
    foo = Foo(value=struct_pb2.ListValue(values=entries))
    # proto-plus unwraps the ListValue to native Python scalars.
    assert foo.value == ["foo", "bar", True]
def test_struct_pb():
    """A Struct-typed proto-plus field compares equal to a plain dict."""
    class Foo(proto.Message):
        value = proto.Field(struct_pb2.Struct, number=1)

    entries = {
        'foo': struct_pb2.Value(string_value='bar'),
        'bacon': struct_pb2.Value(bool_value=True),
    }
    foo = Foo(value=struct_pb2.Struct(fields=entries))
    # proto-plus unwraps the Struct to native Python values.
    assert foo.value == {'foo': 'bar', 'bacon': True}
def test_struct_pb():
    """A Struct-typed proto-plus field compares equal to a plain dict."""
    class Foo(proto.Message):
        value = proto.Field(struct_pb2.Struct, number=1)

    entries = {
        "foo": struct_pb2.Value(string_value="bar"),
        "bacon": struct_pb2.Value(bool_value=True),
    }
    foo = Foo(value=struct_pb2.Struct(fields=entries))
    # proto-plus unwraps the Struct to native Python values.
    assert foo.value == {"foo": "bar", "bacon": True}
def test_experiment_pb(self):
    """experiment_pb() wraps the given hparam/metric infos in a TF Summary
    proto carrying serialized HParamsPluginData under the hparams plugin tag.
    """
    hparam_infos = [
        api_pb2.HParamInfo(
            name="param1",
            display_name="display_name1",
            description="foo",
            type=api_pb2.DATA_TYPE_STRING,
            # Discrete string domain: the allowed values for param1.
            domain_discrete=struct_pb2.ListValue(values=[
                struct_pb2.Value(string_value="a"),
                struct_pb2.Value(string_value="b"),
            ]),
        ),
        api_pb2.HParamInfo(
            name="param2",
            display_name="display_name2",
            description="bar",
            type=api_pb2.DATA_TYPE_FLOAT64,
            # Continuous interval domain for the float hyperparameter.
            domain_interval=api_pb2.Interval(min_value=-100.0, max_value=100.0),
        ),
    ]
    metric_infos = [
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag="loss"),
            dataset_type=api_pb2.DATASET_VALIDATION,
        ),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(group="train/", tag="acc"),
            dataset_type=api_pb2.DATASET_TRAINING,
        ),
    ]
    time_created_secs = 314159.0
    # The expected summary embeds the Experiment proto, serialized, inside
    # the hparams plugin metadata under the fixed experiment tag.
    self.assertEqual(
        summary.experiment_pb(hparam_infos, metric_infos,
                              time_created_secs=time_created_secs),
        tf.compat.v1.Summary(value=[
            tf.compat.v1.Summary.Value(
                tag="_hparams_/experiment",
                tensor=summary._TF_NULL_TENSOR,
                metadata=tf.compat.v1.SummaryMetadata(
                    plugin_data=tf.compat.v1.SummaryMetadata.PluginData(
                        plugin_name="hparams",
                        content=(plugin_data_pb2.HParamsPluginData(
                            version=0,
                            experiment=api_pb2.Experiment(
                                time_created_secs=time_created_secs,
                                hparam_infos=hparam_infos,
                                metric_infos=metric_infos,
                            ),
                        ).SerializeToString()),
                    )),
            )
        ]),
    )
def test_predict_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = PredictionServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        # Supplying both a request object AND flattened keyword fields is
        # rejected by the generated client surface.
        client.predict(
            prediction_service.PredictRequest(),
            endpoint="endpoint_value",
            instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)],
            parameters=struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE),
        )
def test_pack_proto_arrays(self):
    """Packing a proto array stores each element as an unpackable Any."""
    messages = [
        struct_pb2.Value(string_value=text) for text in ['foo', 'bar']
    ]
    tensor = tensor_utils.pack_tensor(np.array(messages))
    self.assertEqual([2], tensor.shape)
    # Each packed Any must restore the corresponding original message.
    for index, expected in enumerate(messages):
        restored = struct_pb2.Value()
        tensor.protos.array[index].Unpack(restored)
        self.assertEqual(expected, restored)
def test_unpack_proto_arrays(self):
    """unpack_tensor() on a packed proto array round-trips every element."""
    messages = [
        struct_pb2.Value(string_value=text) for text in ['foo', 'bar']
    ]
    tensor = tensor_utils.pack_tensor(np.array(messages))
    round_trip = tensor_utils.unpack_tensor(tensor)
    # Each returned Any must restore the corresponding original message.
    for index, expected in enumerate(messages):
        restored = struct_pb2.Value()
        round_trip[index].Unpack(restored)
        self.assertEqual(expected, restored)
def test_explain_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        # Supplying both a request object AND flattened keyword fields is
        # rejected by the generated client surface.
        client.explain(
            prediction_service.ExplainRequest(),
            endpoint="endpoint_value",
            instances=[struct.Value(null_value=struct.NullValue.NULL_VALUE)],
            parameters=struct.Value(null_value=struct.NullValue.NULL_VALUE),
            deployed_model_id="deployed_model_id_value",
        )
def test_field_mask_map_diffs():
    """Any difference inside a map field is reported as the map's own path."""
    populated = struct_pb2.Struct(
        fields={'foo': struct_pb2.Value(number_value=1.0)})
    # (original, modified) pairs — every one should yield exactly ['fields'].
    cases = [
        (struct_pb2.Struct(), populated),   # entry added
        (populated, struct_pb2.Struct()),   # entry removed
        (None, populated),                  # whole message added
        (populated, None),                  # whole message removed
        (populated, struct_pb2.Struct(      # value changed under same key
            fields={'foo': struct_pb2.Value(number_value=2.0)})),
        (populated, struct_pb2.Struct(      # key changed
            fields={'bar': struct_pb2.Value(number_value=1.0)})),
    ]
    for original, modified in cases:
        assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
def test_BotEvent_proto_quarantine(self):
    """A quarantined bot_connected event converts to the expected BotEvent.

    Also test that a bot can belong to two pools.
    """
    event_key = bot_management.bot_event(
        event_type=u'bot_connected',
        bot_id=u'id1',
        external_ip=u'8.8.4.4',
        authenticated_as=u'bot:id1.domain',
        dimensions={
            u'id': [u'id1'],
            u'pool': [u'next', u'previous']
        },
        # The self-reported quarantine reason in state is expected to surface
        # as status_msg below.
        state={
            u'ram': 65.0,
            u'quarantined': u'sad bot'
        },
        version=_VERSION,
        quarantined=True,
        maintenance_msg=None,
        task_id=None,
        task_name=None)
    actual = swarming_pb2.BotEvent()
    event_key.get().to_proto(actual)
    expected = swarming_pb2.BotEvent(
        event=swarming_pb2.BOT_NEW_SESSION,
        bot=swarming_pb2.Bot(
            bot_id=u'id1',
            pools=[u'next', u'previous'],
            dimensions=[
                swarming_pb2.StringListPair(key=u'id', values=[u'id1']),
                swarming_pb2.StringListPair(key=u'pool',
                                            values=[u'next', u'previous']),
            ],
            status=swarming_pb2.QUARANTINED_BY_BOT,
            status_msg=u'sad bot',
            info=swarming_pb2.BotInfo(
                supplemental=struct_pb2.Struct(
                    fields={
                        u'quarantined': struct_pb2.Value(string_value=u'sad bot'),
                        u'ram': struct_pb2.Value(number_value=65.0),
                    }),
                version=_VERSION,
                external_ip=u'8.8.4.4',
                authenticated_as=u'bot:id1.domain',
            ),
        ),
    )
    expected.event_time.FromDatetime(self.now)
    # Text-proto comparison gives a readable diff (py2: `unicode` builtin).
    self.assertEqual(unicode(expected), unicode(actual))
def test_BotEvent_proto_maintenance(self):
    """A bot in maintenance converts to OVERHEAD_MAINTENANCE_EXTERNAL.

    Also test a misconfigured bot not in a pool.
    """
    event_key = _bot_event(event_type=u'bot_connected',
                           bot_id=u'id1',
                           dimensions={u'id': [u'id1']},
                           maintenance_msg=u'Too hot')
    actual = swarming_pb2.BotEvent()
    event_key.get().to_proto(actual)
    expected = swarming_pb2.BotEvent(
        event=swarming_pb2.BOT_NEW_SESSION,
        bot=swarming_pb2.Bot(
            bot_id=u'id1',
            # Deliberately no 'pool' dimension: this bot is misconfigured.
            dimensions=[
                swarming_pb2.StringListPair(key=u'id', values=[u'id1']),
            ],
            status=swarming_pb2.OVERHEAD_MAINTENANCE_EXTERNAL,
            status_msg=u'Too hot',
            info=swarming_pb2.BotInfo(
                supplemental=struct_pb2.Struct(
                    fields={
                        u'ram': struct_pb2.Value(number_value=65),
                    }),
                version=_VERSION,
                external_ip=u'8.8.4.4',
                authenticated_as=u'bot:id1.domain',
            ),
        ),
    )
    expected.event_time.FromDatetime(self.now)
    # Text-proto comparison gives a readable diff (py2: `unicode` builtin).
    self.assertEqual(unicode(expected), unicode(actual))
def test_empty_property_value(self):
    """A ScheduleBuildRequest property with an unset Value is invalid."""
    request = rpc_pb2.ScheduleBuildRequest(
        builder=dict(project='chromium', bucket='try', builder='linux-rel'),
        # `a` carries a Value with no kind set — validation must reject it.
        properties=dict(fields=dict(a=struct_pb2.Value())),
    )
    self.assert_invalid(
        request,
        r'properties\.a: value is not set; for null, initialize null_value'
    )
def test_read(self, mock_stub_class):
    """_read drives CreateSession, Read, StreamingRead and DeleteSession."""
    stub = mock_stub_class.return_value
    stub.CreateSession.return_value = spanner_pb2.Session(name='test_session')
    # One row / one partial result so the probe has data to consume.
    stub.Read.return_value = result_set_pb2.ResultSet(rows=[
        struct_pb2.ListValue(
            values=[struct_pb2.Value(string_value='test_username')])
    ])
    stub.StreamingRead.return_value = iter([
        result_set_pb2.PartialResultSet(
            values=[struct_pb2.Value(string_value='test_username')])
    ])
    metrics = {}
    spanner_probes._read(stub, metrics)
    # Each stage of the probe must hit the stub exactly once.
    stub.CreateSession.assert_called_once()
    stub.Read.assert_called_once()
    stub.StreamingRead.assert_called_once()
    stub.DeleteSession.assert_called_once()
    self.assertGreater(len(metrics), 0)
def test_empty_property_value(self):
    """An UpdateBuildRequest output property with an unset Value is invalid."""
    request = rpc_pb2.UpdateBuildRequest(
        build=build_pb2.Build(
            id=1,
            # `a` carries a Value with no kind set — validation must reject it.
            output=dict(properties=dict(fields=dict(a=struct_pb2.Value()))),
        ),
        update_mask=field_mask_pb2.FieldMask(paths=['build.output.properties']),
    )
    self.assert_invalid(
        request,
        r'build\.output\.properties\.a: value is not set; '
        'for null, initialize null_value'
    )
def DeletePredictionObjects(self, request, context):
    '''
    Endpoint for pod deletion.
    Arguments:
        request: request object.
        context: the request context.
    Return:
        Empty Object
    '''
    # status.context translates exceptions into gRPC status codes.
    with status.context(context):
        self.dao.delete_pod(map(self._map_object, request.objects))
    # gRPC requires a response message; an empty Value stands in for "none".
    return struct_pb2.Value()
def create_finding(self, organization, finding):
    """Translate a finding dict into the CSCC shape and create it."""
    # Property keys may contain dots; sanitize each key and wrap each value
    # as a string-typed struct_pb2.Value.
    properties = {
        self._replace_dots(key): struct_pb2.Value(string_value=str(value))
        for key, value in finding.get('properties', {}).items()
    }
    source_finding = {
        'id': finding['id'],
        'category': finding['category'],
        'asset_ids': finding['asset_ids'],
        'source_id': finding['source_id'],
        'event_time': timestamp_pb2.Timestamp(seconds=finding['event_time']),
        'url': finding['url'],
        'properties': {'fields': properties},
    }
    return self._security_client.create_finding(organization, source_finding)
def test_field_mask_zero_values():
    """Zero-valued fields yield an empty field mask in either direction."""
    # One zero-valued message per field flavor the helper must handle.
    zero_messages = [
        color_pb2.Color(red=0.0),            # singular value
        struct_pb2.ListValue(values=[]),     # repeated value
        struct_pb2.Struct(fields={}),        # map
        struct_pb2.Value(number_value=0.0),  # oneof
    ]
    for message in zero_messages:
        assert protobuf_helpers.field_mask(message, None).paths == []
        assert protobuf_helpers.field_mask(None, message).paths == []
def _source_properties(self):
    """Collect the non-None finding attributes as string-valued properties."""
    names = (
        'priority', 'summary', 'container_id', 'container_name',
        'kubernetes_pod_name', 'severity', 'rule_type',
    )
    source_properties = {}
    for name in names:
        attr = getattr(self, name)
        # Skip unset attributes; everything else is stringified.
        if attr is not None:
            source_properties[name] = struct_pb2.Value(string_value=str(attr))
    self._merge_container_metadata(source_properties)
    return source_properties
async def Call(self, msg, *args):
    """Processes messages received by the GRPC server

    :param msg: The received message
    :param args: Extra positional arguments
    :return: Empty protobuf struct, necessary to return for protobuf Empty
    """
    if not self.callback:
        self._logger.debug(
            'Grpclet received data request, but no callback was registered'
        )
    else:
        # Dispatch the callback as a task so this handler returns promptly.
        self._pending_tasks.append(asyncio.create_task(self.callback(msg)))
    self.msg_recv += 1
    self._update_pending_tasks()
    return struct_pb2.Value()
def PacketTx(self, request, context):
    # NOTE(review): both triple-quoted blocks in this method are bare string
    # expression statements used to comment out code (Scapy packet logging
    # and an ncclient NETCONF dispatch). They are preserved verbatim;
    # consider deleting them or moving them to version control history.
    """
    packet = Ether(request.packet)
    s = (
        '*** PACKET IN ***\n'
        'DeviceName: {deviceName}\n'
        'DeviceInterface: {deviceInterface}\n'
        'OriginatingRule: {originatingRule}\n'
        'Packet: \n{packet}\n'
    ).format(
        deviceName=request.device_name,
        deviceInterface=request.device_interface,
        originatingRule=request.originating_rule,
        packet=packet.show(dump=True)
    )
    logging.info(s)
    """
    # Count every PacketTx request seen by this process.
    global count
    count += 1
    print("Request number: ", count)
    """
    try:
        [request.oltPort][request.onuId]
        cmd = cmds[request.device_name][request.device_interface]
    except KeyError as _:
        logging.error("Onu not found!")
    with manager.connect_ssh(
        self.obbaaAddress,
        port=self.obbaaPort,
        username="******",
        password="******",
        hostkey_verify=False,
        look_for_keys=False,
        device_params={'name': 'default'}) as m:
        try:
            m.dispatch(cmd, filter=('subtree', filter_olt))
        except Exception as ex:
            logging.error(
                "Failed to dispatch new configuration on OBBAA: {}".format(str(ex)))
    """
    # gRPC requires a response message; an empty Value serves as "no data".
    return struct_pb2.Value()
def testCreateFindings(self):
    """createFinding() sends a finding to CSCC and it shows in getAllFindings().

    NOTE(review): this appears to hit a live CSCC source (hard-coded
    organization/source IDs) rather than a mock — confirm it is meant to run
    only against the test project.
    """
    # SETUP
    properties = Properties()
    testSecuritySource = 'organizations/827482578277/sources/9233151395087538604'
    cscc = CsccService(properties, testSecuritySource)
    # GIVEN a mock finding
    finding = {
        'name': 'TESTFINDING123',
        'parent': 'organizations/827482578277/sources/9233151395087538604',
        'resource_name': 'organizations/827482578277/projects/test-project',
        'state': 'INACTIVE',
        'category': 'TEST-CATEGORY',
        'external_uri': 'https://35.197.241.246/compliance/reporting/nodes/d35f7363-4d0a-40a3-b52d-d92ff1050c33',
        'source_properties': {
            'control_id': struct_pb2.Value(string_value="cis-gcp-benchmark-vms-4.6"),
            'control_title': struct_pb2.Value(string_value='Title'),
            'code_description': struct_pb2.Value(
                string_value=
                "Instance chef-automate should have disks encrypted with csek"
            ),
            'code_message': struct_pb2.Value(
                string_value=
                "expected #has_disks_encrypted_with_csek? to return true, got false"
            ),
            'summary': struct_pb2.Value(
                string_value=
                "CIS Google Cloud Platform Foundation Benchmark Level 2"),
            'status': struct_pb2.Value(string_value='Fail')
        },
        'security_marks': {},
        'event_time': cscc.timestamp(datetime.now()),
        'create_time': cscc.timestamp(datetime.now())
    }
    # WHEN create finding is called
    cscc.createFinding(finding)
    # THEN findings are sent to cscc
    findingList = cscc.getAllFindings()
    assert len(findingList) >= 1
def _get_value(param: _pipeline_param.PipelineParam) -> struct_pb2.Value:
    """Converts a PipelineParam's concrete value into a struct_pb2.Value.

    Args:
        param: The pipeline parameter whose value is converted; its
            declared param_type selects the target Value field.

    Returns:
        A google.protobuf.struct_pb2.Value holding the parameter's value.

    Raises:
        ValueError: If the parameter's type maps to no known ParameterType.
    """
    assert param.value is not None, 'None values should be filtered out.'
    result = struct_pb2.Value()
    # TODO(chensun): remove defaulting to 'String' for None param_type once we
    # fix importer behavior.
    param_type = type_utils.get_parameter_type(param.param_type or 'String')
    if (param_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER or
            param_type == pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE):
        # struct_pb2.Value stores all numbers as doubles, so integers are
        # coerced to float here.
        result.number_value = float(param.value)
    elif param_type == pipeline_spec_pb2.ParameterType.STRING:
        result.string_value = param.value
    elif param_type == pipeline_spec_pb2.ParameterType.BOOLEAN:
        result.bool_value = param.value
    elif param_type == pipeline_spec_pb2.ParameterType.LIST:
        # list_value/struct_value are populated in place (extend/update)
        # rather than assigned, per protobuf python API.
        result.list_value.extend(param.value)
    elif param_type == pipeline_spec_pb2.ParameterType.STRUCT:
        result.struct_value.update(param.value)
    else:
        raise ValueError('Unknown type for PipelineParam {}'.format(param))
    return result