    def build_messages(
            self, payloads: List[Union[str, list, dict]]) -> List[Intent.Message]:
        messages = []
        for payload in payloads:
            if isinstance(payload, str):
                message = Intent.Message(
                    text=Intent.Message.Text(text=[payload.strip()]),
                    platform=self.platform,
                )
            elif isinstance(payload, list):
                message = Intent.Message(
                    text=Intent.Message.Text(
                        text=[text.strip() for text in payload]),
                    platform=self.platform,
                )
            elif isinstance(payload, dict):
                payload_struct = Struct()
                if 'payload' in payload:
                    payload_struct.update({
                        self.PLATFORMS[self.platform].lower():
                        payload['payload']
                    })
                elif 'choice_buttons' in payload:
                    _payload = self.build_choice_buttons(
                        payload['choice_buttons'])
                    payload_struct.update(
                        {self.PLATFORMS[self.platform].lower(): _payload})
                else:
                    payload_struct.update(payload)

                message = Intent.Message(
                    payload=payload_struct,
                    platform=self.platform,
                )
            else:
                raise TypeError(f'Invalid payload "{payload}"')

            messages.append(message)

        return messages
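For context, a minimal sketch of the dict branch above, assuming the proto-plus google-cloud-dialogflow (v2) client; the platform value and payload shape here are illustrative, not taken from the example:

from google.protobuf.struct_pb2 import Struct
from google.cloud.dialogflow_v2.types import Intent

# Struct.update accepts plain Python values and nests dicts automatically.
payload_struct = Struct()
payload_struct.update(
    {"facebook": {"text": "Pick one", "quick_replies": ["Yes", "No"]}})

message = Intent.Message(
    payload=payload_struct,
    platform=Intent.Message.Platform.FACEBOOK,
)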
Example #2
    def _execute_update_helper(self, count=0):
        from google.protobuf.struct_pb2 import Struct
        from google.cloud.spanner_v1.proto.result_set_pb2 import (
            ResultSet,
            ResultSetStats,
        )
        from google.cloud.spanner_v1.proto.transaction_pb2 import TransactionSelector
        from google.cloud.spanner_v1._helpers import _make_value_pb

        MODE = 2  # PROFILE
        stats_pb = ResultSetStats(row_count_exact=1)
        database = _Database()
        api = database.spanner_api = self._make_spanner_api()
        api.execute_sql.return_value = ResultSet(stats=stats_pb)
        session = _Session(database)
        transaction = self._make_one(session)
        transaction._transaction_id = self.TRANSACTION_ID
        transaction._execute_sql_count = count

        row_count = transaction.execute_update(
            DML_QUERY_WITH_PARAM, PARAMS, PARAM_TYPES, query_mode=MODE
        )

        self.assertEqual(row_count, 1)

        expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
        expected_params = Struct(
            fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
        )

        api.execute_sql.assert_called_once_with(
            self.SESSION_NAME,
            DML_QUERY_WITH_PARAM,
            transaction=expected_transaction,
            params=expected_params,
            param_types=PARAM_TYPES,
            query_mode=MODE,
            seqno=count,
            metadata=[("google-cloud-resource-prefix", database.name)],
        )

        self.assertEqual(transaction._execute_sql_count, count + 1)
Example #3
def deserialize_properties(props_struct: struct_pb2.Struct) -> Any:
    """
    Deserializes a protobuf `struct_pb2.Struct` into a Python dictionary containing normal
    Python types.
    """
    # Check out this link for details on what sort of types Protobuf is going to generate:
    # https://developers.google.com/protocol-buffers/docs/reference/python-generated
    #
    # We assume that we are deserializing properties that we got from a Resource RPC endpoint,
    # which has type `Struct` in our gRPC proto definition.
    if _special_sig_key in props_struct:
        if props_struct[_special_sig_key] == _special_asset_sig:
            # This is an asset. Re-hydrate this object into an Asset.
            if "path" in props_struct:
                return known_types.new_file_asset(props_struct["path"])
            if "text" in props_struct:
                return known_types.new_string_asset(props_struct["text"])
            if "uri" in props_struct:
                return known_types.new_remote_asset(props_struct["uri"])
            raise AssertionError("Invalid asset encountered when unmarshaling resource property")
        elif props_struct[_special_sig_key] == _special_archive_sig:
            # This is an archive. Re-hydrate this object into an Archive.
            if "assets" in props_struct:
                return known_types.new_asset_archive(deserialize_property(props_struct["assets"]))
            if "path" in props_struct:
                return known_types.new_file_archive(props_struct["path"])
            if "uri" in props_struct:
                return known_types.new_remote_archive(props_struct["uri"])
            raise AssertionError("Invalid archive encountered when unmarshaling resource property")
        elif props_struct[_special_sig_key] == _special_secret_sig:
            raise AssertionError("this version of the Pulumi SDK does not support first-class secrets")

        raise AssertionError("Unrecognized signature when unmarshaling resource property")

    # Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways.
    output = {}
    for k, v in list(props_struct.items()):
        value = deserialize_property(v)
        # We treat values that deserialize to "None" as if they don't exist.
        if value is not None:
            output[k] = value

    return output
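The duck-typing noted in the final loop is a property of the well-known Struct type itself, not of Pulumi; a standalone illustration:

from google.protobuf.struct_pb2 import Struct

s = Struct()
s.update({"name": "web", "replicas": 3, "labels": {"tier": "frontend"}})

# Struct behaves like a dict: membership tests, item access and iteration
# all work.  Note that numbers always come back as floats.
assert "name" in s
assert s["replicas"] == 3.0
for key, value in s.items():
    print(key, value)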
Example #4
    def test_log_proto_defaults(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value

        message = Struct(fields={"foo": Value(bool_value=True)})
        ENTRIES = [
            {
                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
                "protoPayload": json.loads(MessageToJson(message)),
                "resource": {"type": "global", "labels": {}},
            }
        ]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._make_one(self.LOGGER_NAME, client=client)

        logger.log_proto(message)

        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
Example #5
    def test_log_proto_w_explicit_client_labels_severity_httpreq(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._make_one(self.LOGGER_NAME, client=client1,
                                labels=DEFAULT_LABELS)

        logger.log_proto(message, client=client2, labels=LABELS,
                         insert_id=IID, severity=SEVERITY,
                         http_request=REQUEST)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))
Example #6
    def test_log_proto_explicit(self):
        import datetime
        from google.cloud.logging.resource import Resource
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        message = Struct(fields={'foo': Value(bool_value=True)})
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        TRACE = '12345678-1234-5678-1234-567812345678'
        SPANID = '000000000000004a'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        RESOURCE = Resource(type='gae_app',
                            labels={
                                'module_id': 'default',
                                'version_id': 'test',
                            })
        client = _Client(project=self.PROJECT, connection=_make_credentials())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_proto(message,
                        labels=LABELS,
                        insert_id=IID,
                        severity=SEVERITY,
                        http_request=REQUEST,
                        timestamp=TIMESTAMP,
                        resource=RESOURCE,
                        trace=TRACE,
                        span_id=SPANID)
        self.assertEqual(batch.entries,
                         [('proto', message, LABELS, IID, SEVERITY, REQUEST,
                           TIMESTAMP, RESOURCE, TRACE, SPANID)])
Example #7
    def test_context_mgr_success(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        from google.cloud.logging.logger import Logger
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = Logger('logger_name', client, labels=DEFAULT_LABELS)
        RESOURCE = {
            'type': 'global',
        }
        ENTRIES = [
            {'textPayload': TEXT, 'httpRequest': REQUEST},
            {'jsonPayload': STRUCT, 'labels': LABELS},
            {'protoPayload': json.loads(MessageToJson(message)),
             'severity': SEVERITY},
        ]
        batch = self._make_one(logger, client=client)

        with batch as other:
            other.log_text(TEXT, http_request=REQUEST)
            other.log_struct(STRUCT, labels=LABELS)
            other.log_proto(message, severity=SEVERITY)

        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.full_name, RESOURCE, DEFAULT_LABELS))
Example #8
    def test_context_mgr_failure(self):
        import datetime
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value

        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        message = Struct(fields={'foo': Value(bool_value=True)})
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        UNSENT = [
            ('text', TEXT, None, IID, None, None, TIMESTAMP),
            ('struct', STRUCT, None, None, SEVERITY, None, None),
            ('proto', message, LABELS, None, None, REQUEST, None),
        ]
        batch = self._make_one(logger, client=client)

        try:
            with batch as other:
                other.log_text(TEXT, insert_id=IID, timestamp=TIMESTAMP)
                other.log_struct(STRUCT, severity=SEVERITY)
                other.log_proto(message, labels=LABELS, http_request=REQUEST)
                raise _Bugout()
        except _Bugout:
            pass

        self.assertEqual(list(batch.entries), UNSENT)
        self.assertIsNone(api._write_entries_called_with)
Example #9
    def test_log_proto_w_implicit_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._make_one(self.LOGGER_NAME, client=client)

        logger.log_proto(message)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))
Example #10
    def test_struct_converts_primitive_fields(self):
        converted = dict_to_protobuf(
            Struct,
            json.loads("""
{
  "field1": "value1",
  "field2": 3,
  "field3": 1.0,
  "field4": true,
  "field5": null
} 
        """))

        s = Struct()
        s['field1'] = 'value1'
        s['field2'] = 3
        s['field3'] = 1.0
        s['field4'] = True
        s['field5'] = None

        assert converted == s
Example #11
    def test_log_proto_explicit(self):
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._make_one(logger, client=client)
        batch.log_proto(message, labels=LABELS, insert_id=IID,
                        severity=SEVERITY, http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('proto', message, LABELS, IID, SEVERITY, REQUEST)])
Example #12
import six
from google.protobuf.struct_pb2 import NULL_VALUE, ListValue, Struct, Value


def to_value(value):
    if value is None:
        return Value(null_value=NULL_VALUE)
    elif isinstance(value, bool):
        # This check needs to happen before isinstance(value, int),
        # isinstance(value, int) returns True when value is bool.
        return Value(bool_value=value)
    elif isinstance(value, six.integer_types) or isinstance(value, float):
        return Value(number_value=value)
    elif isinstance(value, six.string_types) or isinstance(
            value, six.text_type):
        return Value(string_value=value)
    elif isinstance(value, dict):
        return Value(struct_value=Struct(
            fields={k: to_value(v)
                    for k, v in value.items()}))
    elif isinstance(value, list):
        return Value(list_value=ListValue(
            values=[to_value(item) for item in value]))
    else:
        raise ValueError('Unsupported data type: {}'.format(type(value)))
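A quick check of to_value (a sketch; the attribute names below are those of google.protobuf.struct_pb2.Value):

v = to_value({"enabled": True, "thresholds": [0.5, 0.9], "note": None})

assert v.struct_value.fields["enabled"].bool_value is True
assert v.struct_value.fields["thresholds"].list_value.values[0].number_value == 0.5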
Example #13
    def test_context_mgr_failure(self):
        import datetime
        from google.protobuf.struct_pb2 import Struct
        from google.protobuf.struct_pb2 import Value
        from google.cloud.logging import TextEntry
        from google.cloud.logging import StructEntry
        from google.cloud.logging import ProtobufEntry

        TEXT = "This is the entry text"
        STRUCT = {"message": TEXT, "weather": "partly cloudy"}
        LABELS = {"foo": "bar", "baz": "qux"}
        IID = "IID"
        SEVERITY = "CRITICAL"
        METHOD = "POST"
        URI = "https://api.example.com/endpoint"
        STATUS = "500"
        REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS}
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        message = Struct(fields={"foo": Value(bool_value=True)})
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        UNSENT = [
            TextEntry(payload=TEXT, insert_id=IID, timestamp=TIMESTAMP),
            StructEntry(payload=STRUCT, severity=SEVERITY),
            ProtobufEntry(payload=message, labels=LABELS, http_request=REQUEST),
        ]
        batch = self._make_one(logger, client=client)

        try:
            with batch as other:
                other.log_text(TEXT, insert_id=IID, timestamp=TIMESTAMP)
                other.log_struct(STRUCT, severity=SEVERITY)
                other.log_proto(message, labels=LABELS, http_request=REQUEST)
                raise _Bugout()
        except _Bugout:
            pass

        self.assertEqual(list(batch.entries), UNSENT)
        self.assertIsNone(api._write_entries_called_with)
Example #14
    def test_commit_w_bound_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        IID1 = 'IID1'
        IID2 = 'IID2'
        IID3 = 'IID3'
        RESOURCE = {
            'type': 'global',
        }
        ENTRIES = [
            {
                'textPayload': TEXT,
                'insertId': IID1
            },
            {
                'jsonPayload': STRUCT,
                'insertId': IID2
            },
            {
                'protoPayload': json.loads(MessageToJson(message)),
                'insertId': IID3
            },
        ]
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        batch = self._makeOne(logger, client=client)

        batch.log_text(TEXT, insert_id=IID1)
        batch.log_struct(STRUCT, insert_id=IID2)
        batch.log_proto(message, insert_id=IID3)
        batch.commit()

        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.path, RESOURCE, None))
Example #15
import json

import dialogflow
from flask import jsonify
from google.protobuf.json_format import MessageToJson
from google.protobuf.struct_pb2 import Struct, Value


def detect_intent(request):
    response = {
        'status_code': 500,
        'message': 'Failed to fetch data from dialogflow.',
        'data': []
    }

    try:
        request_json = request.get_json(silent=True, force=True)

        project_id = "build-agent-local"
        session_client = dialogflow.SessionsClient.from_service_account_json('key.json')

        payload = {'project_id': project_id, 'session_id': request_json.get("session_id")}
        struct_pb = Struct(fields={
            key: Value(string_value=value) for key, value in payload.items()
        })
        params = dialogflow.types.QueryParameters(
            time_zone="PST", payload=struct_pb)
        session = session_client.session_path(project_id, request_json.get("session_id"))

        text_input = dialogflow.types.TextInput(
            text=request_json.get("text"), language_code='en-US')
        query_input = dialogflow.types.QueryInput(text=text_input)
        df_response = session_client.detect_intent(
            session=session, query_input=query_input, query_params=params)

        # Convert proto object and serialize it to a json format string.
        json_obj = MessageToJson(df_response)

        result = json.loads(json_obj)
        response['status_code'] = 200
        response['data'] = result
        response['message'] = "Successfully fetched response from dialogflow."
    except Exception as e:
        print(e)

    return jsonify(response)
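As an aside, the explicit Value(string_value=...) wrapping in detect_intent is optional: Struct.update coerces plain Python values (including None, which becomes a null_value). An equivalent construction of struct_pb, using the same variables as above, would be:

struct_pb = Struct()
struct_pb.update({
    "project_id": project_id,
    "session_id": request_json.get("session_id"),
})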
Example #16
    def test_from_pb_w_unknown_metadata(self):
        from google.longrunning import operations_pb2
        from google.protobuf.any_pb2 import Any
        from google.protobuf.json_format import ParseDict
        from google.protobuf.struct_pb2 import Struct
        from google.cloud._testing import _Monkey
        from google.cloud import operation as MUT

        type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name, )
        client = _Client()
        meta = ParseDict({'foo': 'Bar'}, Struct())
        metadata_pb = Any(type_url=type_url, value=meta.SerializeToString())
        operation_pb = operations_pb2.Operation(name=self.OPERATION_NAME,
                                                metadata=metadata_pb)
        klass = self._get_target_class()

        with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}):
            operation = klass.from_pb(operation_pb, client)

        self.assertEqual(operation.name, self.OPERATION_NAME)
        self.assertIs(operation.client, client)
        self.assertEqual(operation.metadata, meta)
        self.assertEqual(operation.caller_metadata, {})
Example #17
    def test_context_mgr_failure(self):
        from google.protobuf.struct_pb2 import Struct, Value
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        message = Struct(fields={'foo': Value(bool_value=True)})
        conn = _Connection({})
        CLIENT = _Client(project=self.PROJECT, connection=conn)
        logger = _Logger()
        UNSENT = [
            ('text', TEXT, None, IID, None, None),
            ('struct', STRUCT, None, None, SEVERITY, None),
            ('proto', message, LABELS, None, None, REQUEST),
        ]
        batch = self._makeOne(logger, client=CLIENT)

        try:
            with batch as other:
                other.log_text(TEXT, insert_id=IID)
                other.log_struct(STRUCT, severity=SEVERITY)
                other.log_proto(message, labels=LABELS, http_request=REQUEST)
                raise _Bugout()
        except _Bugout:
            pass

        self.assertEqual(list(batch.entries), UNSENT)
        self.assertEqual(len(conn._requested), 0)
Example #18
    def test_log_proto_w_implicit_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        conn = _Connection({})
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        logger.log_proto(message)
        self.assertEqual(len(conn._requested), 1)
        req = conn._requested[0]
        SENT = {
            'entries': [{
                'logName': 'projects/%s/logs/%s' % (
                    self.PROJECT, self.LOGGER_NAME),
                'protoPayload': json.loads(MessageToJson(message)),
                'resource': {
                    'type': 'global',
                },
            }],
        }
        self.assertEqual(req['method'], 'POST')
        self.assertEqual(req['path'], '/entries:write')
        self.assertEqual(req['data'], SENT)
Example #19
    def test_commit_w_bound_client(self):
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        IID1 = 'IID1'
        IID2 = 'IID2'
        IID3 = 'IID3'
        conn = _Connection({})
        CLIENT = _Client(project=self.PROJECT, connection=conn)
        logger = _Logger()
        SENT = {
            'logName': logger.path,
            'resource': {
                'type': 'global',
            },
            'entries': [
                {'textPayload': TEXT, 'insertId': IID1},
                {'jsonPayload': STRUCT, 'insertId': IID2},
                {'protoPayload': json.loads(MessageToJson(message)),
                 'insertId': IID3},
            ],
        }
        batch = self._makeOne(logger, client=CLIENT)
        batch.log_text(TEXT, insert_id=IID1)
        batch.log_struct(STRUCT, insert_id=IID2)
        batch.log_proto(message, insert_id=IID3)
        batch.commit()
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(len(conn._requested), 1)
        req = conn._requested[0]
        self.assertEqual(req['method'], 'POST')
        self.assertEqual(req['path'], '/entries:write')
        self.assertEqual(req['data'], SENT)
Example #20
    def test_log_proto_w_timestamp(self):
        import json
        import datetime
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'timestamp': '2016-12-31T00:01:02.999999Z',
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._make_one(self.LOGGER_NAME, client=client)

        logger.log_proto(message, timestamp=TIMESTAMP)

        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))
Example #21
    def detectEvent(self, session_id, event_name, payload):
        session_client = dialogflow.SessionsClient()
        session = session_client.session_path(self._project_id, session_id)

        parameters = Struct(fields={'value': Value(string_value=payload)})

        js = self.safeParseJson(payload)

        if hasattr(js, 'items'):
            for key, value in js.items():
                nKey = self.normilizeKeyDialogflow(key)
                parameters[nKey] = value

        event_input = dialogflow.EventInput(name=event_name,
                                            language_code='ru-RU',
                                            parameters=parameters)

        query_input = dialogflow.QueryInput(event=event_input)

        try:
            response = session_client.detect_intent(session=session,
                                                    query_input=query_input)
            self.handleDialogflowResponse(response)
        except Exception:
            print("error while detecting event")
            self._isEndConversation = True

        if not self._isEndConversation:
            self.playSound(self._wake_sound_file)
            print('event conversation continue')
            self.stopDetectHotword()
            self.runDetectIntent(session_id)
        else:
            print('event conversation finished')
            self.stopDetectIntent()
            self.runDetectHotword()
Example #22
    def test_execute_sql_explicit(self):
        from google.protobuf.struct_pb2 import Struct, Value
        from google.cloud.spanner_v1.proto.type_pb2 import STRING

        SQL = 'SELECT first_name, age FROM citizens'
        database = self._make_database()
        session = self._make_one(database)
        session._session_id = 'DEADBEEF'

        params = Struct(fields={'foo': Value(string_value='bar')})
        param_types = {'foo': STRING}

        with mock.patch(
                'google.cloud.spanner_v1.session.Snapshot') as snapshot:
            found = session.execute_sql(SQL, params, param_types, 'PLAN')

        self.assertIs(found, snapshot().execute_sql.return_value)

        snapshot().execute_sql.assert_called_once_with(
            SQL,
            params,
            param_types,
            'PLAN',
        )
Example #23
    def test_json_converts_single_object(self):
        s = Struct()
        s['field1'] = {
            'key1': 'value1',
            'key2': 3.0,
            'key3': 1.0,
            'key4': True,
            'key5': None,
        }
        s['field2'] = 'key2'

        converted = protobuf_to_dict(s)
        assert (converted == json.loads("""
{
  "field1": {
    "key1": "value1",
    "key2": 3.0,
    "key3": 1.0,
    "key4": true,
    "key5": null
  },
  "field2": "key2"
}
    """))
Example #24
    def partition_query(
        self,
        sql,
        params=None,
        param_types=None,
        partition_size_bytes=None,
        max_partitions=None,
        *,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform a ``PartitionQuery`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type partition_size_bytes: int
        :param partition_size_bytes:
            (Optional) desired size for each partition generated.  The service
            uses this as a hint, the actual partition size may differ.

        :type max_partitions: int
        :param max_partitions:
            (Optional) desired maximum number of partitions generated. The
            service uses this as a hint, the actual number of partitions may
            differ.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :rtype: iterable of bytes
        :returns: a sequence of partition tokens

        :raises ValueError:
            for single-use snapshots, or if a transaction ID is
            already associated with the snapshot.
        """
        if not self._multi_use:
            raise ValueError("Cannot use single-use snapshot.")

        if self._transaction_id is None:
            raise ValueError("Transaction not started.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for (key, value) in params.items()
            })
        else:
            params_pb = Struct()

        database = self._session._database
        api = database.spanner_api
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        partition_options = PartitionOptions(
            partition_size_bytes=partition_size_bytes,
            max_partitions=max_partitions)
        request = PartitionQueryRequest(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            partition_options=partition_options,
        )

        trace_attributes = {"db.statement": sql}
        with trace_call(
                "CloudSpanner.PartitionReadWriteTransaction",
                self._session,
                trace_attributes,
        ):
            response = api.partition_query(
                request=request,
                metadata=metadata,
                retry=retry,
                timeout=timeout,
            )

        return [partition.partition_token for partition in response.partitions]
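A hedged usage sketch for partition_query, assuming a multi-use snapshot with an active transaction on an existing database (the table, columns, and snapshot variable are illustrative):

from google.cloud.spanner_v1 import param_types

tokens = snapshot.partition_query(
    "SELECT id, name FROM users WHERE active = @active",
    params={"active": True},
    param_types={"active": param_types.BOOL},
    max_partitions=10,
)
# Each token can then be passed back via execute_sql(partition=token),
# typically from parallel workers.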
Example #25
    def execute_sql(
        self,
        sql,
        params=None,
        param_types=None,
        query_mode=None,
        query_options=None,
        request_options=None,
        partition=None,
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    ):
        """Perform an ``ExecuteStreamingSql`` API request.

        :type sql: str
        :param sql: SQL query statement

        :type params: dict, {str -> column value}
        :param params: values for parameter replacement.  Keys must match
                       the names used in ``sql``.

        :type param_types: dict[str -> Union[dict, .types.Type]]
        :param param_types:
            (Optional) maps explicit types for one or more param values;
            required if parameters are passed.

        :type query_mode:
            :class:`~google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode`
        :param query_mode: Mode governing return of results / query plan.
            See:
            `QueryMode <https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode>`_.

        :type query_options:
            :class:`~google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions`
                or :class:`dict`
        :param query_options:
                (Optional) Query optimizer configuration to use for the given query.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.QueryOptions`

        :type request_options:
            :class:`google.cloud.spanner_v1.types.RequestOptions`
        :param request_options:
                (Optional) Common options for this request.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.spanner_v1.types.RequestOptions`.

        :type partition: bytes
        :param partition: (Optional) one of the partition tokens returned
                          from :meth:`partition_query`.

        :type retry: :class:`~google.api_core.retry.Retry`
        :param retry: (Optional) The retry settings for this request.

        :type timeout: float
        :param timeout: (Optional) The timeout for this request.

        :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
        :returns: a result set instance which can be used to consume rows.

        :raises ValueError:
            for reuse of single-use snapshots, or if a transaction ID is
            already pending for multiple-use snapshots.
        """
        if self._read_request_count > 0:
            if not self._multi_use:
                raise ValueError("Cannot re-use single-use snapshot.")
            if self._transaction_id is None:
                raise ValueError("Transaction ID pending.")

        if params is not None:
            if param_types is None:
                raise ValueError(
                    "Specify 'param_types' when passing 'params'.")
            params_pb = Struct(fields={
                key: _make_value_pb(value)
                for key, value in params.items()
            })
        else:
            params_pb = {}

        database = self._session._database
        metadata = _metadata_with_prefix(database.name)
        transaction = self._make_txn_selector()
        api = database.spanner_api

        # Query-level options have higher precedence than client-level and
        # environment-level options
        default_query_options = database._instance._client._query_options
        query_options = _merge_query_options(default_query_options,
                                             query_options)

        if isinstance(request_options, dict):
            request_options = RequestOptions(request_options)

        request = ExecuteSqlRequest(
            session=self._session.name,
            sql=sql,
            transaction=transaction,
            params=params_pb,
            param_types=param_types,
            query_mode=query_mode,
            partition_token=partition,
            seqno=self._execute_sql_count,
            query_options=query_options,
            request_options=request_options,
        )
        restart = functools.partial(
            api.execute_streaming_sql,
            request=request,
            metadata=metadata,
            retry=retry,
            timeout=timeout,
        )

        trace_attributes = {"db.statement": sql}
        iterator = _restart_on_unavailable(
            restart,
            request,
            "CloudSpanner.ReadWriteTransaction",
            self._session,
            trace_attributes,
        )

        self._read_request_count += 1
        self._execute_sql_count += 1

        if self._multi_use:
            return StreamedResultSet(iterator, source=self)
        else:
            return StreamedResultSet(iterator)
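And a matching sketch for execute_sql, again with illustrative identifiers; note that passing params requires param_types, as enforced above:

from google.cloud.spanner_v1 import param_types

results = snapshot.execute_sql(
    "SELECT first_name, age FROM citizens WHERE age >= @min_age",
    params={"min_age": 21},
    param_types={"min_age": param_types.INT64},
)
for row in results:
    print(row)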
Example #26
def generic_command(client, args):
    params = json_format.Parse(args.params, Struct())
    response = client.GenericCommand(
        magmad_pb2.GenericCommandParams(command=args.command, params=params), )
    print(response)
Example #27
def deserialize_properties(props_struct: struct_pb2.Struct,
                           keep_unknowns: Optional[bool] = None) -> Any:
    """
    Deserializes a protobuf `struct_pb2.Struct` into a Python dictionary containing normal
    Python types.
    """
    # Check out this link for details on what sort of types Protobuf is going to generate:
    # https://developers.google.com/protocol-buffers/docs/reference/python-generated
    #
    # We assume that we are deserializing properties that we got from a Resource RPC endpoint,
    # which has type `Struct` in our gRPC proto definition.
    if _special_sig_key in props_struct:
        from .. import FileAsset, StringAsset, RemoteAsset, AssetArchive, FileArchive, RemoteArchive  # pylint: disable=import-outside-toplevel
        if props_struct[_special_sig_key] == _special_asset_sig:
            # This is an asset. Re-hydrate this object into an Asset.
            if "path" in props_struct:
                return FileAsset(props_struct["path"])
            if "text" in props_struct:
                return StringAsset(props_struct["text"])
            if "uri" in props_struct:
                return RemoteAsset(props_struct["uri"])
            raise AssertionError(
                "Invalid asset encountered when unmarshalling resource property"
            )
        if props_struct[_special_sig_key] == _special_archive_sig:
            # This is an archive. Re-hydrate this object into an Archive.
            if "assets" in props_struct:
                return AssetArchive(
                    deserialize_property(props_struct["assets"]))
            if "path" in props_struct:
                return FileArchive(props_struct["path"])
            if "uri" in props_struct:
                return RemoteArchive(props_struct["uri"])
            raise AssertionError(
                "Invalid archive encountered when unmarshalling resource property"
            )
        if props_struct[_special_sig_key] == _special_secret_sig:
            return wrap_rpc_secret(deserialize_property(props_struct["value"]))
        if props_struct[_special_sig_key] == _special_resource_sig:
            return deserialize_resource(props_struct, keep_unknowns)
        raise AssertionError(
            "Unrecognized signature when unmarshalling resource property")

    # Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways. Note
    # that if the struct had any secret properties, we push the secretness of the object up to us
    # since we can only set secret outputs on top level properties.
    output = {}
    for k, v in list(props_struct.items()):
        # Unilaterally skip properties considered internal by the Pulumi engine.
        # These don't actually contribute to the exposed shape of the object, do
        # not need to be passed back to the engine, and often will not match the
        # expected type we are deserializing into.
        # Keep "__provider" as it's the property name used by Python dynamic providers.
        if k.startswith("__") and k != "__provider":
            continue

        value = deserialize_property(v, keep_unknowns)
        # We treat values that deserialize to "None" as if they don't exist.
        if value is not None:
            output[k] = value

    return output
Example #28
    def _dict_to_struct(data):
        return Struct(
            fields={k: TestLogging._to_value(v)
                    for k, v in data.items()})
Example #29
    def get_bento_service_metadata_pb(self):
        from bentoml.yatai.proto.repository_pb2 import BentoServiceMetadata

        bento_service_metadata = BentoServiceMetadata()
        bento_service_metadata.name = self.config["metadata"]["service_name"]
        bento_service_metadata.version = self.config["metadata"][
            "service_version"]
        bento_service_metadata.created_at.FromDatetime(
            self.config["metadata"]["created_at"])

        if "env" in self.config:
            if "setup_sh" in self.config["env"]:
                bento_service_metadata.env.setup_sh = self.config["env"][
                    "setup_sh"]

            if "conda_env" in self.config["env"]:
                bento_service_metadata.env.conda_env = dump_to_yaml_str(
                    self.config["env"]["conda_env"])

            if "pip_packages" in self.config["env"]:
                for pip_package in self.config["env"]["pip_packages"]:
                    bento_service_metadata.env.pip_packages.append(pip_package)
            if "python_version" in self.config["env"]:
                bento_service_metadata.env.python_version = self.config["env"][
                    "python_version"]
            if "docker_base_image" in self.config["env"]:
                bento_service_metadata.env.docker_base_image = self.config[
                    "env"]["docker_base_image"]

        if "apis" in self.config:
            for api_config in self.config["apis"]:
                if 'handler_type' in api_config:
                    # Convert handler type to input type for saved bundle created
                    # before version 0.8.0
                    input_type = api_config.get('handler_type')
                elif 'input_type' in api_config:
                    input_type = api_config.get('input_type')
                else:
                    input_type = "unknown"

                if 'output_type' in api_config:
                    output_type = api_config.get('output_type')
                else:
                    output_type = "DefaultOutput"

                api_metadata = BentoServiceMetadata.BentoServiceApi(
                    name=api_config["name"],
                    docs=api_config["docs"],
                    input_type=input_type,
                    output_type=output_type,
                )
                if "handler_config" in api_config:
                    # Supports viewing API input config info for saved bundle created
                    # before version 0.8.0
                    for k, v in api_config["handler_config"].items():
                        if k in {'mb_max_latency', 'mb_max_batch_size'}:
                            setattr(api_metadata, k, v)
                        else:
                            api_metadata.input_config[k] = v
                else:
                    if 'mb_max_latency' in api_config:
                        api_metadata.mb_max_latency = api_config[
                            "mb_max_latency"]
                    else:
                        api_metadata.mb_max_latency = DEFAULT_MAX_LATENCY

                    if 'mb_max_batch_size' in api_config:
                        api_metadata.mb_max_batch_size = api_config[
                            "mb_max_batch_size"]
                    else:
                        api_metadata.mb_max_batch_size = DEFAULT_MAX_BATCH_SIZE

                if "input_config" in api_config:
                    for k, v in api_config["input_config"].items():
                        api_metadata.input_config[k] = v

                if "output_config" in api_config:
                    for k, v in api_config["output_config"].items():
                        api_metadata.output_config[k] = v
                api_metadata.batch = api_config.get("batch", False)
                bento_service_metadata.apis.extend([api_metadata])

        if "artifacts" in self.config:
            for artifact_config in self.config["artifacts"]:
                artifact_metadata = BentoServiceMetadata.BentoArtifact()
                if "name" in artifact_config:
                    artifact_metadata.name = artifact_config["name"]
                if "artifact_type" in artifact_config:
                    artifact_metadata.artifact_type = artifact_config[
                        "artifact_type"]
                if "metadata" in artifact_config:
                    if isinstance(artifact_config["metadata"], dict):
                        s = Struct()
                        s.update(artifact_config["metadata"])
                        artifact_metadata.metadata.CopyFrom(s)
                    else:
                        logger.warning(
                            "Tried to get non-dictionary metadata for artifact "
                            f"{artifact_metadata.name}. Ignoring metadata...")
                bento_service_metadata.artifacts.extend([artifact_metadata])

        return bento_service_metadata
Example #30
    def access(self, resp, req, environ, request_time):
        super().access(resp, req, environ, request_time)

        # Ignore health check
        if environ['PATH_INFO'] == '/healthz':
            return

        # See gunicorn/glogging.py
        status = resp.status
        if isinstance(status, str):
            status = status.split(None, 1)[0]
        now = datetime.datetime.utcnow()

        level = logging.NOTSET
        message = {
            '@type': 'type.googleapis.com/google.appengine.logging.v1.RequestLog',
            'ip': environ.get('REMOTE_ADDR'),
            'startTime': format_time(now - request_time),
            'endTime': format_time(now),
            'latency': '%d.%06ds' % (request_time.seconds, request_time.microseconds),
            'method': environ['REQUEST_METHOD'],
            'resource': environ['PATH_INFO'],
            'httpVersion': environ['SERVER_PROTOCOL'],
            'status': status,
            'responseSize': getattr(resp, 'sent', None),
            'userAgent': environ.get('HTTP_USER_AGENT'),
        }

        request_log = environ.get(REQUEST_LOG_VARIABLE)
        if request_log:
            message['urlMapEntry'] = request_log.endpoint
            message['line'] = [
                {
                    'time': format_time(
                        datetime.datetime.utcfromtimestamp(record.created)),
                    'severity': record.levelname,
                    'logMessage': access_formatter.format(record),
                    # The log viewer only wants real App Engine files, so we
                    # can't put the actual file here.
                    'sourceLocation': None,
                } for record in request_log.lines
            ]
            level = max(
                (record.levelno for record in request_log.lines),
                default=logging.NOTSET,
            )

        if level > logging.NOTSET:
            severity = logging.getLevelName(level)
        else:
            severity = None

        struct_pb = ParseDict(message, Struct())
        self.cloud_logger.log_proto(struct_pb, severity=severity)