def Open(self, req, context):
    # Open RPC handler: logs the call and returns a canned OpenResponse
    # populated with demo media metadata and demo context entries.
    # NOTE(review): py2-style print statement — this block predates py3.
    print 'V3 ', 'Open', 'called'
    # req = talk_pb2.OpenRequest()
    event_res = talk_pb2.OpenResponse()
    event_res.code = 1000000  # status code this service uses for success
    event_res.reason = 'success'
    # Metadata Struct: three flat keys plus two nested "audio" structs
    # describing clips for the client to play.
    answer_meta = struct.Struct()
    answer_meta["play1"] = "play1"
    answer_meta["play2"] = "play2"
    answer_meta["play3"] = "play3"
    answer_meta.get_or_create_struct("audio1")["name"] = "media_play1"
    # NOTE(review): "htpp" in the URLs looks like a typo for "http" — confirm
    # whether this demo data is intentional.
    answer_meta.get_or_create_struct(
        "audio1")["url"] = "htpp://101.123.212.321:232/media/player_1.mp3"
    answer_meta.get_or_create_struct("audio1")["duration"] = "00:10:12"
    answer_meta.get_or_create_struct("audio2")["name"] = "media_play2"
    answer_meta.get_or_create_struct(
        "audio2")["url"] = "htpp://101.123.212.321:232/media/player_1.mp3"
    answer_meta.get_or_create_struct("audio2")["duration"] = "00:00:15"
    event_res.meta.CopyFrom(answer_meta)
    # Context Struct with demo context bodies.
    answer_context = struct.Struct()
    answer_context["context1"] = "context_body1"
    answer_context["context2"] = "context_body2"
    answer_context["context3"] = "context_body3"
    event_res.context.CopyFrom(answer_context)
    return event_res
def test_field_mask_map_diffs():
    """Any change to a map-valued message (add, remove, modify, rename key,
    or comparison against None) yields the single top-level 'fields' path."""
    def make(**values):
        return struct_pb2.Struct(
            fields={key: struct_pb2.Value(number_value=num)
                    for key, num in values.items()})

    cases = [
        (struct_pb2.Struct(), make(foo=1.0)),   # key added
        (make(foo=1.0), struct_pb2.Struct()),   # key removed
        (None, make(foo=1.0)),                  # created from nothing
        (make(foo=1.0), None),                  # deleted entirely
        (make(foo=1.0), make(foo=2.0)),         # value changed
        (make(foo=1.0), make(bar=1.0)),         # key renamed
    ]
    for original, modified in cases:
        assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
def test_send_to_bq(self):
    """Happy path: two rows are inserted in a single BigQuery insertAll call."""
    payloads = []

    def json_request(url, method, payload, scopes, deadline):
        # Fake of net.json_request: validates the insertAll request shape
        # and records the payload for later inspection.
        self.assertEqual(
            'https://www.googleapis.com/bigquery/v2/projects/sample-app/datasets/'
            'swarming/tables/foo/insertAll', url)
        payloads.append(payload)
        self.assertEqual('POST', method)
        self.assertEqual(bq_state.bqh.INSERT_ROWS_SCOPE, scopes)
        self.assertEqual(600, deadline)
        # Empty insertErrors -> every row was accepted.
        return {'insertErrors': []}

    self.mock(bq_state.net, 'json_request', json_request)
    rows = [
        ('key1', struct_pb2.Struct()),
        ('key2', struct_pb2.Struct()),
    ]
    # send_to_bq returns the number of rows that failed to insert.
    self.assertEqual(0, bq_state.send_to_bq('foo', rows))
    expected = [
        {
            'ignoreUnknownValues': False,
            'kind': 'bigquery#tableDataInsertAllRequest',
            'skipInvalidRows': True,
        },
    ]
    # Compare the request envelope separately from the (opaque) encoded rows.
    actual_rows = payloads[0].pop('rows')
    self.assertEqual(expected, payloads)
    self.assertEqual(2, len(actual_rows))
def testStruct(self):
    """End-to-end exercise of well_known_types.Struct: item access, nested
    structs and lists, binary and text round-trips, and in-place mutation."""
    struct = struct_pb2.Struct()
    struct_class = struct.__class__
    # Scalar value kinds.
    struct['key1'] = 5
    struct['key2'] = 'abc'
    struct['key3'] = True
    # Nested struct and list values.
    struct.get_or_create_struct('key4')['subkey'] = 11.0
    struct_list = struct.get_or_create_list('key5')
    struct_list.extend([6, 'seven', True, False, None])
    struct_list.add_struct()['subkey2'] = 9
    self.assertTrue(isinstance(struct, well_known_types.Struct))
    self.assertEqual(5, struct['key1'])
    self.assertEqual('abc', struct['key2'])
    self.assertIs(True, struct['key3'])
    self.assertEqual(11, struct['key4']['subkey'])
    inner_struct = struct_class()
    inner_struct['subkey2'] = 9
    self.assertEqual([6, 'seven', True, False, None, inner_struct],
                     list(struct['key5'].items()))
    # Binary round-trip preserves all values, including nested ones.
    serialized = struct.SerializeToString()
    struct2 = struct_pb2.Struct()
    struct2.ParseFromString(serialized)
    self.assertEqual(struct, struct2)
    self.assertTrue(isinstance(struct2, well_known_types.Struct))
    self.assertEqual(5, struct2['key1'])
    self.assertEqual('abc', struct2['key2'])
    self.assertIs(True, struct2['key3'])
    self.assertEqual(11, struct2['key4']['subkey'])
    self.assertEqual([6, 'seven', True, False, None, inner_struct],
                     list(struct2['key5'].items()))
    # ListValue supports positional reads, item assignment, and nested lists.
    struct_list = struct2['key5']
    self.assertEqual(6, struct_list[0])
    self.assertEqual('seven', struct_list[1])
    self.assertEqual(True, struct_list[2])
    self.assertEqual(False, struct_list[3])
    self.assertEqual(None, struct_list[4])
    self.assertEqual(inner_struct, struct_list[5])
    struct_list[1] = 7
    self.assertEqual(7, struct_list[1])
    struct_list.add_list().extend([1, 'two', True, False, None])
    self.assertEqual([1, 'two', True, False, None],
                     list(struct_list[6].items()))
    # Text-format round-trip.
    text_serialized = str(struct)
    struct3 = struct_pb2.Struct()
    text_format.Merge(text_serialized, struct3)
    self.assertEqual(struct, struct3)
    # get_or_create_struct replaces an existing non-struct value in place.
    struct.get_or_create_struct('key3')['replace'] = 12
    self.assertEqual(12, struct['key3']['replace'])
def testStructAssignment(self):
    """A value copied from one Struct to another must compare equal to the
    source, for every supported value kind."""
    source = struct_pb2.Struct()
    target = struct_pb2.Struct()
    samples = [1, 'a', [1], ['a'], {'a': 'b'}]
    for sample in samples:
        source['x'] = sample
        target['x'] = source['x']
        self.assertEqual(source['x'], target['x'])
def test_send_to_bq_fail(self):
    """Failure path: the first insertAll call reports one insert error.

    send_to_bq retries, so json_request is called twice — first with both
    rows, then with only the failed one — and exactly one row is reported
    as failed overall.
    """
    payloads = []

    def json_request(url, method, payload, scopes, deadline):
        # Fake of net.json_request: validates the request and records it.
        self.assertEqual(
            'https://www.googleapis.com/bigquery/v2/projects/sample-app/datasets/'
            'swarming/tables/foo/insertAll', url)
        first = not payloads
        payloads.append(payload)
        self.assertEqual('POST', method)
        self.assertEqual(bq_state.bqh.INSERT_ROWS_SCOPE, scopes)
        self.assertEqual(600, deadline)
        # Return an error on the first call only.
        if first:
            return {
                'insertErrors': [
                    {
                        'index': 0,
                        'errors': [
                            {
                                'reason': 'sadness',
                                'message': 'Oh gosh',
                            },
                        ],
                    },
                ],
            }
        return {'insertErrors': []}

    self.mock(bq_state.net, 'json_request', json_request)
    rows = [
        ('key1', struct_pb2.Struct()),
        ('key2', struct_pb2.Struct()),
    ]
    # One row ultimately failed to insert.
    self.assertEqual(1, bq_state.send_to_bq('foo', rows))
    self.assertEqual(2, len(payloads), payloads)

    # Both payloads share the same envelope (previously duplicated inline);
    # the first request carries 2 rows, the retry carries the 1 failed row.
    expected = {
        'ignoreUnknownValues': False,
        'kind': 'bigquery#tableDataInsertAllRequest',
        'skipInvalidRows': True,
    }
    for payload, row_count in zip(payloads, (2, 1)):
        actual_rows = payload.pop('rows')
        self.assertEqual(expected, payload)
        self.assertEqual(row_count, len(actual_rows))
def getResponseDialogFlow(session_id: str, text_to_be_analyzed: str,
                          event: str, user: users.User, context_param=None):
    """Sends one query (an event or free text) to Dialogflow for the session
    and returns the resulting query_result.

    Ensures a long-lived "user" context exists for the session (seeded with
    identity parameters on first contact), optionally creates a one-shot
    "message" context from context_param, performs detect_intent, then
    removes the temporary "message" context.

    Args:
        session_id: Dialogflow session identifier.
        text_to_be_analyzed: user utterance; used only when event is falsy.
        event: event name; when set, an event query is sent instead of text.
        user: authenticated user, or falsy for anonymous sessions.
        context_param: optional dict copied into a one-shot "message" context.

    Returns:
        The query_result of the detect_intent response.

    Raises:
        InvalidArgument: propagated unchanged from detect_intent.
    """
    clear_message_context = False
    contexts = get_contexts(config.DIALOG_FLOW_JSON['project_id'], session_id,
                            "user")
    if not contexts:
        # First interaction of this session: seed the "user" context so
        # intents can personalize replies.
        parameters = struct_pb2.Struct()
        if user:
            parameters['login'] = user.getLogin()
            parameters['name'] = user.getNameAndGerb()
        else:
            parameters['login'] = session_id
        create_context(config.DIALOG_FLOW_JSON['project_id'], session_id,
                       "user", 1, parameters)
    if context_param:
        # One-shot "message" context, deleted again after detect_intent.
        parameters = struct_pb2.Struct()
        for key in context_param.keys():
            parameters[key] = context_param.get(key)
        create_context(config.DIALOG_FLOW_JSON['project_id'], session_id,
                       "message", 1, parameters)
        clear_message_context = True
    session = session_client.session_path(
        config.DIALOG_FLOW_JSON['project_id'], session_id)
    if event:
        event_input = dialogflow_v2.types.EventInput(name=event,
                                                     language_code='ru-RU')
        query_input = dialogflow_v2.types.QueryInput(event=event_input)
    else:
        text_input = dialogflow_v2.types.TextInput(text=text_to_be_analyzed,
                                                   language_code='ru-RU')
        query_input = dialogflow_v2.types.QueryInput(text=text_input)
    # The previous try/except InvalidArgument: raise / finally: pass wrapper
    # was a no-op and has been removed; exceptions propagate exactly as
    # before.  NOTE: on failure the temporary "message" context is NOT
    # deleted, matching the original behavior.
    response = session_client.detect_intent(session=session,
                                            query_input=query_input)
    if clear_message_context:
        delete_context(config.DIALOG_FLOW_JSON['project_id'], session_id,
                       "message")
    return response.query_result
def GetAnalyzerInfo(self, request, context):
    # gRPC handler: describes this policy pack — its policies, each policy's
    # config schema, and any initial configuration — as an AnalyzerInfo proto.
    policies: List[proto.PolicyInfo] = []
    for policy in self.__policies:
        # A per-policy enforcement level overrides the pack-wide default.
        enforcement_level = (policy.enforcement_level
                             if policy.enforcement_level is not None
                             else self.__policy_pack_enforcement_level)
        # Collect only the schema fields actually present, so an empty
        # schema maps to configSchema=None rather than an empty message.
        schema = {}
        if policy.config_schema is not None:
            if policy.config_schema.properties:
                # Property definitions travel as a protobuf Struct.
                properties = struct_pb2.Struct()
                for k, v in policy.config_schema.properties.items():
                    # pylint: disable=unsupported-assignment-operation
                    properties[k] = v
                schema["properties"] = properties
            if policy.config_schema.required:
                schema["required"] = policy.config_schema.required
        policies.append(
            proto.PolicyInfo(
                name=policy.name,
                description=policy.description,
                enforcementLevel=self._map_enforcement_level(
                    enforcement_level),
                configSchema=proto.PolicyConfigSchema(
                    **schema) if schema else None,
            ))
    # Translate the (normalized) initial config into per-key PolicyConfig
    # messages, again omitting entries that carry no data.
    initial_config = {}
    if self.__initial_config is not None:
        normalized_config = _normalize_config(self.__initial_config)
        for key, val in normalized_config.items():
            config = {}
            if val.enforcement_level is not None:
                config["enforcementLevel"] = self._map_enforcement_level(
                    val.enforcement_level)
            if val.properties:
                properties = struct_pb2.Struct()
                for k, v in val.properties.items():
                    # pylint: disable=unsupported-assignment-operation
                    properties[k] = v
                config["properties"] = properties
            if config:
                initial_config[key] = proto.PolicyConfig(**config)
    return proto.AnalyzerInfo(
        name=self.__policy_pack_name,
        version=self.__policy_pack_version,
        supportsConfig=True,
        policies=policies,
        initialConfig=initial_config,
    )
def to_proto(self, value) -> struct_pb2.Struct:
    """Coerce *value* into a struct_pb2.Struct, reusing it when possible."""
    # Already a proto Struct (possibly one we produced earlier): return the
    # same instance untouched.
    if isinstance(value, struct_pb2.Struct):
        return value
    # A MapComposite wraps a proto map; rebuild a Struct from its raw fields.
    if isinstance(value, maps.MapComposite):
        return struct_pb2.Struct(fields=dict(value.pb.items()))
    # Anything dict-like: marshal each entry into a struct_pb2.Value first.
    marshalled = {
        key: self._marshal.to_proto(struct_pb2.Value, item)
        for key, item in value.items()
    }
    return struct_pb2.Struct(fields=marshalled)
def test_str_field():
    """A plain string value survives the dict -> Struct copy."""
    source = {'bar': 'baz'}
    target = struct_pb2.Struct()
    copy_dict_to_struct(source, target)
    assert target['bar'] == 'baz'
def GenTests(api):
    # Recipe test generator: yields three simulation cases for this recipe.

    # Case 1: validator run with the Android SDK license properties set.
    yield api.test(
        'validators',
        api.properties(validation='analyze', validation_name='dart analyze',
                       android_sdk_license='android_license',
                       android_sdk_preview_license='android_preview_license'),
        api.repo_util.flutter_environment_data())

    # A fake scheduled child build, used to simulate a successful
    # Buildbucket batch-schedule response.
    props = struct_pb2.Struct()
    props['task_name'] = 'abc'
    build = build_pb2.Build(input=build_pb2.Build.Input(properties=props))
    passed_batch_res = builds_service_pb2.BatchResponse(responses=[
        dict(schedule_build=dict(
            id=build.id, builder=build.builder, input=build.input))
    ])
    # Case 2: sharded run where scheduling the subshards succeeds.
    yield api.test(
        'shards',
        api.properties(shard='framework_tests', subshards=['0', '1_last']),
        api.repo_util.flutter_environment_data(),
        api.buildbucket.simulated_schedule_output(passed_batch_res))

    # Case 3: sharded run where Buildbucket returns an error response.
    err_batch_res = builds_service_pb2.BatchResponse(responses=[
        dict(error=dict(
            code=1,
            message='bad',
        ),
        ),
    ],
    )
    yield api.test(
        'shards_fail',
        api.properties(shard='framework_tests', subshards=['0', '1_last']),
        api.repo_util.flutter_environment_data(),
        api.buildbucket.simulated_schedule_output(err_batch_res))
def test_bool_field():
    """A boolean value keeps its identity through the dict -> Struct copy."""
    source = {'bar': True}
    target = struct_pb2.Struct()
    copy_dict_to_struct(source, target)
    assert target['bar'] is True
def test_float_field():
    """A float value survives the dict -> Struct copy."""
    source = {'bar': 0.1}
    target = struct_pb2.Struct()
    copy_dict_to_struct(source, target)
    assert target['bar'] == 0.1
def to_proto(self):
    """Serialize this experiment config into an ExperimentConfigMsg proto."""
    # Child configs serialize themselves first.
    analyzers = list(map(lambda a: a.to_proto(), self.analyzers))
    evaluators = list(map(lambda e: e.to_proto(), self.evaluators))
    msg = ExperimentConfigMsg(
        id=self.id, task=self.task.to_proto(), backend=self.backend.to_proto(),
        dataset=self.dataset.to_proto(), analyzers=analyzers,
        evaluators=evaluators)
    # URI fields are assigned individually after construction.
    msg.root_uri = self.root_uri
    msg.analyze_uri = self.analyze_uri
    msg.chip_uri = self.chip_uri
    msg.train_uri = self.train_uri
    msg.predict_uri = self.predict_uri
    msg.eval_uri = self.eval_uri
    msg.bundle_uri = self.bundle_uri
    if self.custom_config:
        # custom_config is an arbitrary dict: JSON-encode it into the single
        # 'config' key of a protobuf Struct, then merge into the message.
        msg.MergeFrom(
            ExperimentConfigMsg(
                custom_config=json_format.ParseDict(
                    {
                        'config': json.dumps(self.custom_config)
                    }, struct_pb2.Struct())))
    return msg
def dict_to_struct(dict_obj):
    # type: (dict) -> struct_pb2.Struct
    """Convert a plain dict into a protobuf Struct, logging parse failures."""
    try:
        result = json_format.ParseDict(dict_obj, struct_pb2.Struct())
    except json_format.ParseError:
        logging.error('Failed to parse dict %s', dict_obj)
        raise
    return result
def change_struct_type(value):
    """Wrap dict values in a protobuf Struct; pass anything else through."""
    if not isinstance(value, dict):
        return value
    wrapped = struct_pb2.Struct()
    wrapped.update(value)
    return wrapped
def test_empty_list():
    """An empty list survives the dict -> Struct copy as an empty ListValue."""
    source = {'bar': []}
    target = struct_pb2.Struct()
    copy_dict_to_struct(source, target)
    assert list(target['bar']) == []
def _to_protobuf_value(value: type_utils.PARAMETER_TYPES) -> struct_pb2.Value:
    """Creates a google.protobuf.struct_pb2.Value message out of a provide value.

    Args:
        value: The value to be converted to Value message.

    Returns:
        A google.protobuf.struct_pb2.Value message.

    Raises:
        ValueError if the given value is not one of the parameter types.
    """
    if isinstance(value, str):
        return struct_pb2.Value(string_value=value)
    elif isinstance(value, bool):
        # BUGFIX: bool must be tested BEFORE (int, float). bool is a subclass
        # of int, so the numeric branch used to capture True/False and encode
        # them as number_value, leaving this branch unreachable.
        return struct_pb2.Value(bool_value=value)
    elif isinstance(value, (int, float)):
        return struct_pb2.Value(number_value=value)
    elif isinstance(value, dict):
        return struct_pb2.Value(struct_value=struct_pb2.Struct(
            fields={k: _to_protobuf_value(v) for k, v in value.items()}))
    elif isinstance(value, list):
        return struct_pb2.Value(list_value=struct_pb2.ListValue(
            values=[_to_protobuf_value(v) for v in value]))
    else:
        raise ValueError('Value must be one of the following types: '
                         'str, int, float, bool, dict, and list. Got: '
                         f'"{value}" of type "{type(value)}".')
def test_int_field():
    """An integer value survives the dict -> Struct copy."""
    source = {'bar': 42}
    target = struct_pb2.Struct()
    copy_dict_to_struct(source, target)
    assert target['bar'] == 42
def test_null_field():
    """A None value maps to a null entry through the dict -> Struct copy."""
    source = {'bar': None}
    target = struct_pb2.Struct()
    copy_dict_to_struct(source, target)
    assert target['bar'] is None
def test_secret_push_up(self):
    """Secrets nested inside lists/maps are "pushed up" on deserialization:
    the whole container becomes one secret wrapper holding plain values."""
    # A serialized secret: a special-signature dict wrapping the raw value.
    secret_value = {rpc._special_sig_key: rpc._special_secret_sig,
                    "value": "a secret value"
                    }
    all_props = struct_pb2.Struct()
    all_props["regular"] = "a normal value"
    all_props["list"] = ["a normal value", "another value", secret_value]
    all_props["map"] = {"regular": "a normal value", "secret": secret_value}
    all_props["mapWithList"] = {"regular": "a normal value",
                                "list": ["a normal value", secret_value]}
    all_props["listWithMap"] = [{"regular": "a normal value",
                                 "secret": secret_value}]

    val = rpc.deserialize_properties(all_props)
    # Non-secret top-level values are untouched.
    self.assertEqual(all_props["regular"], val["regular"])

    # A list containing a secret becomes a secret-wrapped plain list.
    self.assertIsInstance(val["list"], dict)
    self.assertEqual(val["list"][rpc._special_sig_key],
                     rpc._special_secret_sig)
    self.assertEqual(val["list"]["value"][0], "a normal value")
    self.assertEqual(val["list"]["value"][1], "another value")
    self.assertEqual(val["list"]["value"][2], "a secret value")

    # A map containing a secret becomes a secret-wrapped plain map.
    self.assertIsInstance(val["map"], dict)
    self.assertEqual(val["map"][rpc._special_sig_key],
                     rpc._special_secret_sig)
    self.assertEqual(val["map"]["value"]["regular"], "a normal value")
    self.assertEqual(val["map"]["value"]["secret"], "a secret value")

    # Secrets propagate up through a list nested inside a map...
    self.assertIsInstance(val["mapWithList"], dict)
    self.assertEqual(val["mapWithList"][rpc._special_sig_key],
                     rpc._special_secret_sig)
    self.assertEqual(val["mapWithList"]["value"]["regular"], "a normal value")
    self.assertEqual(val["mapWithList"]["value"]["list"][0], "a normal value")
    self.assertEqual(val["mapWithList"]["value"]["list"][1], "a secret value")

    # ...and through a map nested inside a list.
    self.assertIsInstance(val["listWithMap"], dict)
    self.assertEqual(val["listWithMap"][rpc._special_sig_key],
                     rpc._special_secret_sig)
    self.assertEqual(val["listWithMap"]["value"][0]["regular"],
                     "a normal value")
    self.assertEqual(val["listWithMap"]["value"][0]["secret"],
                     "a secret value")
def sendGreetings(project_id, session_id, text, language_code):
    """Fires the 'greetPerson' Dialogflow event for the session and returns
    the fulfillment text.

    NOTE(review): the `text` argument is unused — the query is event-driven —
    and the name/surname parameters are hard-coded; confirm this is intended.
    """
    session_client = dialogflow.SessionsClient()
    session = session_client.session_path(project_id, session_id)
    # Event parameters travel as a protobuf Struct.
    parameters = struct_pb2.Struct()
    parameters["name"] = 'Fernando'
    parameters["surname"] = 'Luz'
    query_input = {
        'event': {
            "name": "greetPerson",
            "parameters": parameters,
            "language_code": language_code
        }
    }
    response = session_client.detect_intent(
        session=session, query_input=query_input)
    # Debug trace of what Dialogflow matched.
    print('=' * 20)
    print('Query text: {}'.format(response.query_result.query_text))
    print('Detected intent: {} (confidence: {})\n'.format(
        response.query_result.intent.display_name,
        response.query_result.intent_detection_confidence))
    print('Fulfillment text: {}\n'.format(
        response.query_result.fulfillment_text))
    return str(response.query_result.fulfillment_text)
def __enter__(self) -> resources_pb2.Input:
    # Test fixture entry: creates one image input (with a freshly-named
    # concept, nested metadata, and a geo point) via PostInputs, waits for
    # the upload to complete, and returns the created input.
    my_concept_id = "my-concept-id-" + uuid.uuid4().hex
    my_concept_name = "my concept name " + uuid.uuid4().hex
    # Arbitrary nested metadata attached to the input.
    image_metadata = struct_pb2.Struct()
    image_metadata.update({
        "some-key": "some-value",
        "another-key": {
            "inner-key": "inner-value"
        }
    })
    post_response = self._stub.PostInputs(
        service_pb2.PostInputsRequest(inputs=[
            resources_pb2.Input(data=resources_pb2.Data(
                image=resources_pb2.Image(url=DOG_IMAGE_URL,
                                          allow_duplicate_url=True),
                concepts=[
                    resources_pb2.Concept(
                        id=my_concept_id, name=my_concept_name, value=1)
                ],
                metadata=image_metadata,
                geo=resources_pb2.Geo(geo_point=resources_pb2.GeoPoint(
                    longitude=44, latitude=55)),
            ),
            )
        ]),
        metadata=metadata(),
    )
    raise_on_failure(post_response)
    self._input = post_response.inputs[0]
    # Block until the backend reports the input as fully uploaded.
    wait_for_inputs_upload(self._stub, metadata(), [self._input.id])
    return self._input
def test_metadata():
    """An operation's metadata is surfaced unchanged through the future."""
    meta = struct_pb2.Struct()
    future, _, _ = make_operation_future(
        [make_operation_proto(metadata=meta)])
    assert future.metadata == meta
def test_empty_struct(self):
    """ Tests that the empty Struct deserializes to {}. """
    deserialized = rpc.deserialize_resource_props(struct_pb2.Struct())
    self.assertDictEqual({}, deserialized)
def test_none_list_field_reverse():
    """A Struct list holding a single None copies back as [None]."""
    source = struct_pb2.Struct()
    source.get_or_create_list('bar').append(None)
    result = {}
    copy_struct_to_dict(source, result)
    assert result['bar'] == [None]
def build_importer_spec_for_task(
    task: pipeline_task.PipelineTask
) -> pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec:
    """Builds ImporterSpec for a pipeline task.

    Args:
        task: The task to build a ComponentSpec for.

    Returns:
        A ImporterSpec object for the task.
    """
    type_schema = type_utils.get_artifact_type_schema(
        task.importer_spec.type_schema)
    importer_spec = pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec(
        type_schema=type_schema, reimport=task.importer_spec.reimport)

    # Optional free-form metadata is carried as a protobuf Struct.
    if task.importer_spec.metadata:
        metadata_protobuf_struct = struct_pb2.Struct()
        metadata_protobuf_struct.update(task.importer_spec.metadata)
        importer_spec.metadata.CopyFrom(metadata_protobuf_struct)

    # The artifact URI is either a runtime parameter (when given as a
    # pipeline channel) or a constant string baked into the spec.
    if isinstance(task.importer_spec.artifact_uri,
                  pipeline_channel.PipelineParameterChannel):
        importer_spec.artifact_uri.runtime_parameter = 'uri'
    elif isinstance(task.importer_spec.artifact_uri, str):
        importer_spec.artifact_uri.constant.string_value = task.importer_spec.artifact_uri

    return importer_spec
def pack_Struct(**kwargs):
    """Returns a struct containing the values indicated by kwargs.
    """
    packed = struct_pb2.Struct()
    # Struct.update assigns each key/value pair, same as itemwise `msg[k] = v`.
    packed.update(kwargs)
    return packed
async def serialize_properties(
        inputs: 'Inputs',
        property_deps: Dict[str, List['Resource']],
        input_transformer: Optional[Callable[[str], str]] = None) -> struct_pb2.Struct:
    """Serializes an arbitrary Input bag into a Protobuf structure, keeping
    track of the list of dependent resources in the `deps` list.

    Serializing properties is inherently async because it awaits any futures
    that are contained transitively within the input bag.

    Args:
        inputs: mapping of property name to (possibly awaitable) input value.
        property_deps: out-parameter; receives, per translated property name,
            the resources that property depends on.
        input_transformer: optional key-translation function applied to each
            property name before it is written to the struct.

    Returns:
        The populated struct_pb2.Struct.
    """
    struct = struct_pb2.Struct()
    for k, v in inputs.items():
        deps = []
        result = await serialize_property(v, deps, input_transformer)

        # We treat properties that serialize to None as if they don't exist.
        if result is not None:
            # While serializing to a pb struct, we must "translate" all key names to be what the engine is going to
            # expect. Resources provide the "transform" function for doing this.
            translated_name = k
            if input_transformer is not None:
                translated_name = input_transformer(k)
                log.debug(
                    f"top-level input property translated: {k} -> {translated_name}"
                )
            # pylint: disable=unsupported-assignment-operation
            struct[translated_name] = result
            property_deps[translated_name] = deps

    return struct
def _test_bot_events_simple(self, request):
    # Shared helper: performs a bot handshake, then queries the
    # swarming.v1.BotAPI/Events pRPC endpoint as a user and asserts that the
    # only recorded event is the bot's BOT_NEW_SESSION.
    self.set_as_bot()
    self.do_handshake()
    self.set_as_user()
    raw_resp = self.app.post(
        '/prpc/swarming.v1.BotAPI/Events', _encode(request), self._headers)
    expected = swarming_pb2.BotEventsResponse(
        events=[
            swarming_pb2.BotEvent(
                event_time=timestamp_pb2.Timestamp(seconds=1262401445),
                bot=swarming_pb2.Bot(
                    bot_id='bot1',
                    pools=['default'],
                    info=swarming_pb2.BotInfo(
                        # Free-form bot state travels as a protobuf Struct.
                        supplemental=struct_pb2.Struct(
                            fields={
                                'running_time':
                                    struct_pb2.Value(number_value=1234.0),
                                'sleep_streak':
                                    struct_pb2.Value(number_value=0),
                                'started_ts':
                                    struct_pb2.Value(
                                        number_value=1410990411.11),
                            }),
                        external_ip='192.168.2.2',
                        authenticated_as='bot:whitelisted-ip',
                        version='123',
                    ),
                    dimensions=[
                        swarming_pb2.StringListPair(key='id', values=['bot1']),
                        swarming_pb2.StringListPair(key='os', values=['Amiga']),
                        swarming_pb2.StringListPair(
                            key='pool', values=['default']),
                    ]),
                event=swarming_pb2.BOT_NEW_SESSION,
            ),
        ])
    resp = swarming_pb2.BotEventsResponse()
    _decode(raw_resp.body, resp)
    # Compare text renderings for a readable diff on failure.
    self.assertEqual(unicode(expected), unicode(resp))