def test__update_state_metadata(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Value
    from google.cloud._testing import _Monkey
    from google.cloud import operation as MUT

    operation = self._make_one(None, None)
    self.assertIsNone(operation.metadata)

    val_pb = Value(number_value=1337)
    type_url = "type.googleapis.com/%s" % (Value.DESCRIPTOR.full_name,)
    val_any = Any(type_url=type_url, value=val_pb.SerializeToString())
    operation_pb = operations_pb2.Operation(metadata=val_any)

    with _Monkey(MUT, _TYPE_URL_MAP={type_url: Value}):
        operation._update_state(operation_pb)

    self.assertEqual(operation.metadata, val_pb)
def send(self, request: Message, response_type: type):
    # Wrap the request in an Any so the envelope can carry arbitrary
    # message types, then embed it in the NetMsg envelope.
    data = Any()
    data.Pack(request)

    msg = NetMsg()
    msg.token = self.token
    msg.data.CopyFrom(data)

    # Length-prefixed framing: a 4-byte little-endian size, then the payload.
    senddata = msg.SerializeToString()
    self._socket.send(len(senddata).to_bytes(4, 'little'))
    self._socket.send(senddata)

    # Read the size header (assumes the first recv returns at least 4 bytes),
    # then keep receiving until the whole response has arrived.
    recv_data = self._socket.recv(self._MAX_LENGTH)
    size = int.from_bytes(recv_data[0:4], 'little')
    recv_data = recv_data[4:]
    while len(recv_data) < size:
        recv_data += self._socket.recv(self._MAX_LENGTH)

    # Unpack the Any payload into the caller-supplied response type.
    rsp = NetMsg()
    rsp.ParseFromString(recv_data)
    ret = response_type()
    rsp.data.Unpack(ret)
    return ret
def testAddCollectionDef(self):
    test_dir = _TestDir("good_collection")
    filename = os.path.join(test_dir, "metafile")
    with self.test_session():
        # Creates a graph.
        v0 = tf.Variable(10.0, name="v0")
        var = tf.Variable(tf.constant(0, dtype=tf.int64))
        count_up_to = var.count_up_to(3)
        input_queue = tf.FIFOQueue(30, tf.float32, shared_name="collection_queue")
        qr = tf.train.QueueRunner(input_queue, [count_up_to])
        tf.initialize_all_variables()
        # Creates a saver.
        save = tf.train.Saver({"v0": v0})
        # Adds a set of collections.
        tf.add_to_collection("int_collection", 3)
        tf.add_to_collection("float_collection", 3.5)
        tf.add_to_collection("string_collection", "hello")
        tf.add_to_collection("variable_collection", v0)
        # Add QueueRunners.
        tf.train.add_queue_runner(qr)
        # Adds user_defined proto in three formats: string, bytes and Any.
        queue_runner = queue_runner_pb2.QueueRunnerDef(queue_name="test_queue")
        tf.add_to_collection("user_defined_string_collection", str(queue_runner))
        tf.add_to_collection("user_defined_bytes_collection",
                             queue_runner.SerializeToString())
        any_buf = Any()
        any_buf.Pack(queue_runner)
        tf.add_to_collection("user_defined_any_collection", any_buf)
        # Generates MetaGraphDef.
        meta_graph_def = save.export_meta_graph(filename)
        self.assertTrue(meta_graph_def.HasField("saver_def"))
        self.assertTrue(meta_graph_def.HasField("graph_def"))
        collection_def = meta_graph_def.collection_def
        self.assertEqual(len(collection_def), 10)
    with tf.Graph().as_default():
        # Restores from MetaGraphDef.
        new_saver = tf.train.import_meta_graph(filename)
        # Generates a new MetaGraphDef.
        new_meta_graph_def = new_saver.export_meta_graph()
        # It should be the same as the original.
        self.assertProtoEquals(meta_graph_def, new_meta_graph_def)
def packContract(self, contractType, newContract, data=None, permission_id=None):
    tx = vision.Transaction()
    tx.raw_data.timestamp = 1575712492061
    tx.raw_data.expiration = 1575712551000
    tx.raw_data.ref_block_hash = bytes.fromhex("95DA42177DB00507")
    tx.raw_data.ref_block_bytes = bytes.fromhex("3DCE")
    if data:
        tx.raw_data.data = data

    c = tx.raw_data.contract.add()
    c.type = contractType
    param = Any()
    param.Pack(newContract)
    c.parameter.CopyFrom(param)

    if permission_id:
        c.Permission_id = permission_id

    return tx.raw_data.SerializeToString()
def state1(context, message):
    # State can be accessed directly by getting the state name (as registered
    # when binding the function). Remember that the state has to be a valid
    # Protocol Buffers message, and has to be packed into a google.protobuf.Any.
    pb_any = context['counter']  # raises a ValueError if the accessed state name wasn't registered

    if pb_any:  # state was previously stored for this address
        counter = Counter()
        pb_any.Unpack(counter)
        counter.value += 1
        pb_any.Pack(counter)
        context['counter'] = pb_any
    else:  # state was not stored for this address
        counter = Counter()
        counter.value = 1
        pb_any = Any()
        pb_any.Pack(counter)
        context['counter'] = pb_any
def test_from_pb_w_unknown_metadata(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Struct, Value

    TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,)
    client = _Client()
    meta = Struct(fields={'foo': Value(string_value=u'Bar')})
    metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(
        name=self.OPERATION_NAME, metadata=metadata_pb)
    klass = self._getTargetClass()

    operation = klass.from_pb(operation_pb, client)

    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertTrue(operation.client is client)
    self.assertTrue(operation.pb_metadata is None)
    self.assertEqual(operation.metadata, {})
def test_create(self):
    import datetime
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2)
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from tests.unit._testing import _FakeStub

    NOW = datetime.datetime.utcnow()
    NOW_PB = _datetime_to_pb_timestamp(NOW)
    channel = _make_channel()
    client = self._make_client(project=self.PROJECT, channel=channel,
                               admin=True)
    instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID,
                              display_name=self.DISPLAY_NAME)

    # Create response_pb
    metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
    type_url = 'type.googleapis.com/%s' % (
        messages_v2_pb2.CreateInstanceMetadata.DESCRIPTOR.full_name,)
    response_pb = operations_pb2.Operation(
        name=self.OP_NAME,
        metadata=Any(
            type_url=type_url,
            value=metadata.SerializeToString(),
        ))

    # Patch the stub used by the API method.
    stub = _FakeStub(response_pb)
    client._instance_admin_client.bigtable_instance_admin_stub = stub

    # Perform the method and check the result.
    result = instance.create()

    self.assertIsInstance(result, operation.Operation)
    # self.assertEqual(result.operation.name, self.OP_NAME)
    self.assertIsInstance(result.metadata,
                          messages_v2_pb2.CreateInstanceMetadata)
def run():
    # NOTE(gRPC Python Team): .close() is possible on a channel and should be
    # used in circumstances in which the with statement does not fit the needs
    # of the code.
    if ':' in args.server:
        ip = args.server
    else:
        ip = args.server + ':' + args.port
    print(f'connecting server: {ip}')

    with grpc.insecure_channel(ip) as channel:
        stub = thyroidrpc_pb2_grpc.ThyroidaiGrpcStub(channel)

        ################################################
        ### 1. test first stage real time detection ###
        ################################################
        isRaw = True
        if isRaw:
            img = cv2.imread('./images/190528152340059.png', 0)
            h, w = img.shape
            img = img.tostring()
        else:
            with open('./images/190528152340059.png', 'rb') as f:
                img = f.read()
            h, w = cv2.imread('./images/190528152340059.png', 0).shape

        response = stub.Detect(
            thyroidrpc_pb2.DetectRequest(isRaw=isRaw, image=img, height=h, width=w))

        if response.code != 0:
            print('error code: {}'.format(response.code))
            print('error message: {}'.format(response.msg))
        else:
            anypb = Any()
            anypb.CopyFrom(response.data)
            nodules = thyroidrpc_pb2.Nodules()
            anypb.Unpack(nodules)
            print(f'nodule number is: {nodules.nums}')
            for node in nodules.nodule:
                print(f'nodule {node.n}: ({node.x}, {node.y}, {node.w}, {node.h})')
def fun(context, message):
    # state access
    seen = context.state('seen').unpack(SeenCount)
    seen.seen += 1
    context.state('seen').pack(seen)

    # regular state access
    seenAny = context['seen']
    seenAny.Unpack(seen)

    # sending and replying
    context.pack_and_reply(seen)

    any = Any()
    any.type_url = 'type.googleapis.com/k8s.demo.SeenCount'
    context.send("bar.baz/foo", "12345", any)

    # delayed messages
    context.send_after(timedelta(hours=1), "night/owl", "1", any)

    # egresses
    context.send_egress("foo.bar.baz/my-egress", any)
    context.pack_and_send_egress("foo.bar.baz/my-egress", seen)

    # kafka egress
    context.pack_and_send_egress("sdk/kafka",
                                 kafka_egress_record(topic="hello",
                                                     key=u"hello world",
                                                     value=seen))
    context.pack_and_send_egress("sdk/kafka",
                                 kafka_egress_record(topic="hello", value=seen))

    # AWS Kinesis generic egress
    context.pack_and_send_egress("sdk/kinesis",
                                 kinesis_egress_record(
                                     stream="hello",
                                     partition_key=u"hello world",
                                     value=seen,
                                     explicit_hash_key=u"1234"))
    context.pack_and_send_egress("sdk/kinesis",
                                 kinesis_egress_record(
                                     stream="hello",
                                     partition_key=u"hello world",
                                     value=seen))
def test_list_backup_operations_defaults(self):
    from google.cloud.spanner_admin_database_v1 import CreateBackupMetadata
    from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient
    from google.cloud.spanner_admin_database_v1 import ListBackupOperationsRequest
    from google.cloud.spanner_admin_database_v1 import ListBackupOperationsResponse
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any

    api = DatabaseAdminClient(credentials=mock.Mock())
    client = _Client(self.PROJECT)
    client.database_admin_api = api
    instance = self._make_one(self.INSTANCE_ID, client)

    create_backup_metadata = Any()
    create_backup_metadata.Pack(
        CreateBackupMetadata.pb(
            CreateBackupMetadata(name="backup", database="database")
        )
    )

    operations_pb = ListBackupOperationsResponse(
        operations=[
            operations_pb2.Operation(name="op1", metadata=create_backup_metadata)
        ]
    )

    ldo_api = api._transport._wrapped_methods[
        api._transport.list_backup_operations
    ] = mock.Mock(return_value=operations_pb)

    instance.list_backup_operations()

    expected_metadata = (
        ("google-cloud-resource-prefix", instance.name),
        ("x-goog-request-params", "parent={}".format(instance.name)),
    )
    ldo_api.assert_called_once_with(
        ListBackupOperationsRequest(parent=self.INSTANCE_NAME),
        metadata=expected_metadata,
        retry=mock.ANY,
        timeout=mock.ANY,
    )
def test_list_backup_operations_w_options(self):
    from google.api_core.operation import Operation
    from google.cloud.spanner_admin_database_v1.gapic import database_admin_client
    from google.cloud.spanner_admin_database_v1.proto import backup_pb2
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any

    api = database_admin_client.DatabaseAdminClient(mock.Mock())
    client = _Client(self.PROJECT)
    client.database_admin_api = api
    instance = self._make_one(self.INSTANCE_ID, client)

    create_backup_metadata = Any()
    create_backup_metadata.Pack(backup_pb2.CreateBackupMetadata())

    operations_pb = backup_pb2.ListBackupOperationsResponse(
        operations=[
            operations_pb2.Operation(name="op1", metadata=create_backup_metadata)
        ]
    )

    ldo_api = api._inner_api_calls["list_backup_operations"] = mock.Mock(
        return_value=operations_pb
    )

    operations = instance.list_backup_operations(filter_="filter", page_size=10)

    for op in operations:
        self.assertIsInstance(op, Operation)

    expected_metadata = [
        ("google-cloud-resource-prefix", instance.name),
        ("x-goog-request-params", "parent={}".format(instance.name)),
    ]
    ldo_api.assert_called_once_with(
        backup_pb2.ListBackupOperationsRequest(
            parent=self.INSTANCE_NAME, filter="filter", page_size=10
        ),
        metadata=expected_metadata,
        retry=mock.ANY,
        timeout=mock.ANY,
    )
def _make_log_entry_with_extras(self, labels, iid, type_url, now):
    from google.api.monitored_resource_pb2 import MonitoredResource
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntry
    from google.cloud.grpc.logging.v2.log_entry_pb2 import LogEntryOperation
    from google.logging.type.http_request_pb2 import HttpRequest
    from google.logging.type.log_severity_pb2 import WARNING
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp

    resource_pb = MonitoredResource(type='global', labels=labels)
    proto_payload = Any(type_url=type_url)
    timestamp_pb = _datetime_to_pb_timestamp(now)
    request_pb = HttpRequest(
        request_url='http://example.com/requested',
        request_method='GET',
        status=200,
        referer='http://example.com/referer',
        user_agent='AGENT',
        cache_hit=True,
        request_size=256,
        response_size=1024,
        remote_ip='1.2.3.4',
    )
    operation_pb = LogEntryOperation(
        producer='PRODUCER',
        first=True,
        last=True,
        id='OPID',
    )
    entry_pb = LogEntry(
        log_name=self.LOG_PATH,
        resource=resource_pb,
        proto_payload=proto_payload,
        timestamp=timestamp_pb,
        severity=WARNING,
        insert_id=iid,
        http_request=request_pb,
        labels=labels,
        operation=operation_pb,
    )
    return entry_pb
def _pack_values(values):
    """Pack protobuf values."""
    packed = {}
    if values is None:
        return packed

    for key, value in six.iteritems(values):
        packed_value = Any()
        if isinstance(value, float):
            packed_value.Pack(wrappers_pb2.DoubleValue(value=value))
        elif isinstance(value, six.integer_types):
            packed_value.Pack(wrappers_pb2.Int64Value(value=value))
        elif isinstance(value, six.string_types):
            packed_value.Pack(wrappers_pb2.StringValue(value=value))
        else:
            raise ValueError('Unknown stat type for ' + key)

        packed[key] = packed_value

    return packed
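# Hedged companion sketch (not part of the original module): reading values
# back out of the dict produced by _pack_values. Any.Unpack() returns False
# when the payload holds a different type, so each wrapper type is tried in
# turn; the helper name _unpack_value is illustrative.
from google.protobuf import wrappers_pb2

def _unpack_value(packed_value):
    for wrapper in (wrappers_pb2.DoubleValue,
                    wrappers_pb2.Int64Value,
                    wrappers_pb2.StringValue):
        candidate = wrapper()
        if packed_value.Unpack(candidate):
            return candidate.value
    raise ValueError('Unsupported packed type: ' + packed_value.type_url)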
def get_files(self):
    '''
    Based on the official golang implementation; untested.

    Args:
        None

    Returns:
        The list of file objects that tiller requires.
    '''
    file_list = []
    for root, _, files in os.walk(self.source_directory, topdown=True):
        for tpl_file in files:
            relativepath = os.path.relpath(
                os.path.join(root, tpl_file), self.source_directory)
            if self.selectfile(relativepath):
                # The relative path doubles as the Any type_url so tiller can
                # recover each file's location within the chart.
                with open(os.path.join(root, tpl_file), "rb") as f:
                    file_list.append(Any(type_url=relativepath, value=f.read()))
    return file_list
def test_constructor_with_any(self):
    from google.protobuf.any_pb2 import Any

    payload = Any()
    pb_entry = self._make_one(payload=payload, logger=mock.sentinel.logger)

    self.assertIs(pb_entry.payload, payload)
    self.assertIs(pb_entry.payload_pb, payload)
    self.assertIsNone(pb_entry.payload_json)
    self.assertIs(pb_entry.logger, mock.sentinel.logger)
    self.assertIsNone(pb_entry.insert_id)
    self.assertIsNone(pb_entry.timestamp)
    self.assertIsNone(pb_entry.labels)
    self.assertIsNone(pb_entry.severity)
    self.assertIsNone(pb_entry.http_request)
    self.assertIsNone(pb_entry.trace)
    self.assertIsNone(pb_entry.span_id)
    self.assertIsNone(pb_entry.trace_sampled)
    self.assertIsNone(pb_entry.source_location)
def engine_fuzz(request, _):
    """Run engine fuzzer."""
    engine_impl = engine.get(request.engine)
    result, fuzzer_metadata = fuzz_task.run_engine_fuzzer(
        engine_impl,
        request.target_name,
        request.sync_corpus_directory,
        request.testcase_directory,
    )

    crashes = [
        untrusted_runner_pb2.EngineCrash(
            input_path=crash.input_path,
            stacktrace=protobuf_utils.encode_utf8_if_unicode(crash.stacktrace),
            reproduce_args=crash.reproduce_args,
            crash_time=crash.crash_time,
        ) for crash in result.crashes
    ]

    packed_stats = {}
    for key, value in six.iteritems(result.stats):
        packed_value = Any()
        if isinstance(value, float):
            packed_value.Pack(wrappers_pb2.DoubleValue(value=value))
        elif isinstance(value, int):
            packed_value.Pack(wrappers_pb2.Int32Value(value=value))
        elif isinstance(value, six.string_types):
            packed_value.Pack(wrappers_pb2.StringValue(value=value))
        else:
            raise ValueError("Unknown stat type for " + key)

        packed_stats[key] = packed_value

    return untrusted_runner_pb2.EngineFuzzResponse(
        logs=protobuf_utils.encode_utf8_if_unicode(result.logs),
        command=result.command,
        crashes=crashes,
        stats=packed_stats,
        time_executed=result.time_executed,
        fuzzer_metadata=fuzzer_metadata,
    )
def _set_properties(stac_data, properties, type_url_prefix):
    """
    Pack properties and then set the properties member value to the input.

    :param stac_data: message whose ``properties`` Any field is overwritten
    :param properties: protobuf message to pack into the ``properties`` field
    :param type_url_prefix: prefix used when constructing the Any type URL
    :return: tuple of the updated stac_data and the properties message
    """
    if properties is None:
        return

    # pack the properties into an Any field
    packed_properties = Any()
    packed_properties.Pack(
        properties,
        type_url_prefix=type_url_prefix + properties.DESCRIPTOR.full_name)

    # overwrite the previous properties field with this updated version
    stac_data.properties.CopyFrom(packed_properties)
    return stac_data, properties
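# Hedged companion sketch (assumed helper, not in the original source): the
# inverse of _set_properties, unpacking the Any field back into a concrete
# properties message type supplied by the caller.
def _get_properties(stac_data, properties_cls):
    properties = properties_cls()
    if not stac_data.properties.Unpack(properties):
        raise ValueError(
            'properties is not a ' + properties_cls.DESCRIPTOR.full_name)
    return properties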
def _consume(self):
    while True:
        try:
            for message in self._consumer:
                _log.info(f'Message received - {message}')

                any = Any()
                any.ParseFromString(message.value)

                if any.Is(TaskException.DESCRIPTOR):
                    self._raise_exception(any, TaskException)
                elif any.Is(TaskResult.DESCRIPTOR):
                    self._return_result(any, TaskResult)
                elif any.Is(TaskActionException.DESCRIPTOR):
                    self._raise_exception(any, TaskActionException)
                elif any.Is(TaskActionResult.DESCRIPTOR):
                    self._return_action_result(any, TaskActionResult)
        except Exception as ex:
            _log.warning(f'Exception in consumer thread - {ex}', exc_info=ex)
def test_integration_incomplete_context(self):
    functions = StatefulFunctions()

    @functions.bind(
        typename='org.foo/bar',
        states=[
            StateSpec('seen'),
            StateSpec('missing_state_1'),
            StateSpec('missing_state_2',
                      expire_after=AfterWrite(timedelta(milliseconds=2000)))
        ])
    async def fun(context, message):
        pass

    #
    # build an invocation that provides only 'seen' state
    #
    builder = InvocationBuilder()
    builder.with_target("org.foo", "bar", "0")

    seen = SeenCount()
    seen.seen = 100
    builder.with_state("seen", seen)
    builder.with_invocation(Any(), None)

    #
    # invoke
    #
    result_json = async_round_trip(functions, builder)

    #
    # assert indicated missing states
    #
    missing_state_1_spec = json_at(result_json, NTH_MISSING_STATE_SPEC(0))
    self.assertEqual(missing_state_1_spec['state_name'], 'missing_state_1')

    missing_state_2_spec = json_at(result_json, NTH_MISSING_STATE_SPEC(1))
    self.assertEqual(missing_state_2_spec['state_name'], 'missing_state_2')
    self.assertEqual(missing_state_2_spec['expiration_spec']['mode'], 'AFTER_WRITE')
    self.assertEqual(missing_state_2_spec['expiration_spec']['expire_after_millis'], '2000')
def __sendRequest(self, objs):
    items = []
    for obj in objs:
        request_any = Any()
        request_any.Pack(obj)
        items.append(request_any)

    request = Request_pb2.Request()
    request.items.extend(items)
    data = request.SerializeToString()

    # Pad to a 16-byte AES block boundary (PKCS#7-style: each padding byte
    # records the padding length) before encrypting.
    length = 16 - (len(data) % 16)
    data += bytes([length]) * length
    aes = AES.new(shbus.consts.key, shbus.consts.aes_mode, shbus.consts.iv)
    data = aes.encrypt(data)

    payload = {'request': base64.b64encode(data)}
    r = requests.post(shbus.consts.MONITOR_URL, data=payload)

    # A fresh cipher object is needed for decryption; the first one's
    # internal state was advanced by encrypt().
    aes = AES.new(shbus.consts.key, shbus.consts.aes_mode, shbus.consts.iv)
    return shbus.utils.unpad(aes.decrypt(r.content))
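# Hedged sketch of the unpad step used above, assuming shbus.utils.unpad
# strips the PKCS#7-style padding applied before encryption (the helper is
# not shown in the original snippet):
def unpad(data: bytes) -> bytes:
    # The final byte records how many padding bytes were appended.
    return data[:-data[-1]]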
def test__update_state_response(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Value
    from google.cloud._testing import _Monkey
    from google.cloud import operation as MUT

    operation = self._make_one(None, None)
    self.assertIsNone(operation.error)
    self.assertIsNone(operation.response)

    response_pb = Value(string_value='totes a response')
    type_url = 'type.googleapis.com/%s' % (Value.DESCRIPTOR.full_name,)
    response_any = Any(type_url=type_url, value=response_pb.SerializeToString())
    operation_pb = operations_pb2.Operation(response=response_any)

    with _Monkey(MUT, _TYPE_URL_MAP={type_url: Value}):
        operation._update_state(operation_pb)

    self.assertIsNone(operation.error)
    self.assertEqual(operation.response, response_pb)
def environment_initialized(self, run_options: RunOptions) -> None:
    self.run_options = run_options
    # Tuple of (major, minor, patch)
    vi = sys.version_info
    env_params = run_options.environment_parameters

    msg = TrainingEnvironmentInitialized(
        python_version=f"{vi[0]}.{vi[1]}.{vi[2]}",
        mlagents_version=mlagents.trainers.__version__,
        mlagents_envs_version=mlagents_envs.__version__,
        torch_version=torch_utils.torch.__version__,
        torch_device_type=torch_utils.default_device().type,
        num_envs=run_options.env_settings.num_envs,
        num_environment_parameters=len(env_params) if env_params else 0,
    )

    any_message = Any()
    any_message.Pack(msg)

    env_init_msg = OutgoingMessage()
    env_init_msg.set_raw_bytes(any_message.SerializeToString())
    super().queue_message_to_send(env_init_msg)
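# Hedged receiver-side sketch for the message built above: parse the Any back
# from the raw bytes and unpack the TrainingEnvironmentInitialized payload.
# The raw_bytes variable stands in for whatever the transport delivered.
incoming = Any()
incoming.ParseFromString(raw_bytes)
initialized = TrainingEnvironmentInitialized()
if not incoming.Unpack(initialized):
    raise ValueError('unexpected payload type: ' + incoming.type_url)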
def train_model(self, train_dir: Path) -> Model:
    self._param_grid_event.wait()
    version = self.get_new_version()

    with self._param_grid_lock:
        param_grid = self._param_grid

    best_model = self.get_best_model(train_dir, param_grid)

    if self.context.node_index == 0:
        with self._train_condition:
            self._add_train_results(version, best_model)
            while True:
                if len(self._train_results[version]) == len(self.context.nodes):
                    results = self._train_results[version]
                    del self._train_results[version]
                    break
                self._train_condition.wait()

        model, params, score = max(results, key=lambda item: item[2])
        logger.info('Best parameters found across all nodes: {}'.format(params))
        return SVMModel(model, version, self.feature_provider, self.probability)
    else:
        message = Any()
        message.Pack(
            SVMTrainerMessage(setTrainResult=SetTrainResult(
                version=version,
                params=json.dumps(best_model[1]),
                score=best_model[2],
                model=pickle.dumps(best_model[0]))))
        self.context.nodes[0].internal.MessageInternal(
            InternalMessage(searchId=self._search_id,
                            trainerIndex=self._trainer_index,
                            message=message))
        return SVMModel(best_model[0], version, self.feature_provider,
                        self.probability)
def _build_executable_spec(
    node_id: str,
    spec: any_pb2.Any) -> local_deployment_config_pb2.ExecutableSpec:
    """Builds ExecutableSpec given the any proto from IntermediateDeploymentConfig."""
    result = local_deployment_config_pb2.ExecutableSpec()
    if spec.Is(result.python_class_executable_spec.DESCRIPTOR):
        spec.Unpack(result.python_class_executable_spec)
    elif spec.Is(result.container_executable_spec.DESCRIPTOR):
        spec.Unpack(result.container_executable_spec)
    elif spec.Is(result.beam_executable_spec.DESCRIPTOR):
        spec.Unpack(result.beam_executable_spec)
    else:
        raise ValueError(
            'Executor spec of {} is expected to be of one of the '
            'types of tfx.orchestration.deployment_config.ExecutableSpec.spec '
            'but got type {}'.format(node_id, spec.type_url))
    return result
def _predict_grpc_custom_data(self, request):
    tfrequest = predict_pb2.PredictRequest()

    # handle input
    #
    # Unpack custom data into tfrequest - taking raw inputs prepared by the user.
    # This allows the use case when the model's input is not a single tftensor
    # but a map of tensors like defined in predict.proto:
    #     PredictRequest.inputs: map<string, TensorProto>
    request.customData.Unpack(tfrequest)

    # handle prediction
    tfresponse = self._handle_grpc_prediction(tfrequest)

    # handle result
    #
    # Pack tfresponse into the SeldonMessage's custom data - letting user handle
    # raw outputs. This allows the case when the model's output is not a single
    # tftensor but a map of tensors like defined in predict.proto:
    #     PredictResponse: map<string, TensorProto>
    custom_data = Any()
    custom_data.Pack(tfresponse)
    return prediction_pb2.SeldonMessage(customData=custom_data)
def test__update_state_with_empty_response(self):
    from google.cloud.proto.speech.v1 import cloud_speech_pb2
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any

    # Simulate an empty response (rather than no response yet, which
    # is distinct).
    response = cloud_speech_pb2.LongRunningRecognizeResponse(results=[])
    type_url = 'type.googleapis.com/%s' % response.DESCRIPTOR.full_name
    any_pb = Any(
        type_url=type_url,
        value=response.SerializeToString(),
    )
    operation_pb = operations_pb2.Operation(
        name=self.OPERATION_NAME,
        response=any_pb,
    )

    # Establish that we raise ValueError at state update time.
    client = object()
    operation = self._make_one(self.OPERATION_NAME, client)
    with self.assertRaises(ValueError):
        operation._update_state(operation_pb)
def test_from_pb_w_unknown_metadata(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.json_format import ParseDict
    from google.protobuf.struct_pb2 import Struct
    from google.cloud._testing import _Monkey
    from google.cloud import operation as MUT

    type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,)
    client = _Client()
    meta = ParseDict({'foo': 'Bar'}, Struct())
    metadata_pb = Any(type_url=type_url, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(
        name=self.OPERATION_NAME, metadata=metadata_pb)
    klass = self._get_target_class()

    with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}):
        operation = klass.from_pb(operation_pb, client)

    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertIs(operation.client, client)
    self.assertEqual(operation.metadata, meta)
    self.assertEqual(operation.caller_metadata, {})
def default_error_from_exception(exc_info, code=None, message=None):
    """ Create a new GrpcError instance representing an underlying exception.

    If the `GRPC_DEBUG` key is set in the Nameko config, the `status` message
    will capture the underlying traceback in a
    `google.rpc.error_details.DebugInfo` message.
    """
    exc_type, exc, tb = exc_info

    code = code or StatusCode.UNKNOWN
    message = message or str(exc)

    status = Status(code=STATUS_CODE_ENUM_TO_INT_MAP[code], message=message)

    if config.get("GRPC_DEBUG"):
        debug_info = Any()
        debug_info.Pack(
            DebugInfo(
                stack_entries=traceback.format_exception(*exc_info),
                detail=str(exc),
            )
        )
        status.details.append(debug_info)

    return GrpcError(code=code, message=message, status=status)
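# Hedged companion sketch (assumed helper, not part of the original module):
# recovering the DebugInfo detail on the receiving side by scanning the
# Status message's repeated Any details field.
def extract_debug_info(status):
    for detail in status.details:
        if detail.Is(DebugInfo.DESCRIPTOR):
            info = DebugInfo()
            detail.Unpack(info)
            return info
    return None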
def _instance_api_response_for_create(self):
    import datetime
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud._helpers import _datetime_to_pb_timestamp
    from google.cloud.bigtable_admin_v2.gapic import (
        bigtable_instance_admin_client)
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2)
    from google.cloud.bigtable_admin_v2.types import instance_pb2

    NOW = datetime.datetime.utcnow()
    NOW_PB = _datetime_to_pb_timestamp(NOW)
    metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
    type_url = 'type.googleapis.com/{}'.format(
        messages_v2_pb2.CreateInstanceMetadata.DESCRIPTOR.full_name)
    response_pb = operations_pb2.Operation(
        name=self.OP_NAME,
        metadata=Any(
            type_url=type_url,
            value=metadata.SerializeToString(),
        ))
    response = operation.from_gapic(
        response_pb,
        mock.Mock(),
        instance_pb2.Instance,
        metadata_type=messages_v2_pb2.CreateInstanceMetadata,
    )
    project_path_template = 'projects/{}'
    location_path_template = 'projects/{}/locations/{}'
    instance_api = mock.create_autospec(
        bigtable_instance_admin_client.BigtableInstanceAdminClient)
    instance_api.create_instance.return_value = response
    instance_api.project_path = project_path_template.format
    instance_api.location_path = location_path_template.format
    return instance_api, response
def _append_file_to_result(root, rel_folder_path, file):
    abspath = os.path.abspath(os.path.join(root, file))
    relpath = os.path.join(rel_folder_path, file)

    encodings = ('utf-8', 'latin1')
    unicode_errors = []

    for encoding in encodings:
        try:
            with open(abspath, 'r') as f:
                file_contents = f.read().encode(encoding)
        except OSError as e:
            LOG.debug(
                'Failed to open and read file %s in the helm '
                'chart directory.', abspath)
            raise chartbuilder_exceptions.FilesLoadException(
                file=abspath, details=e)
        except UnicodeError as e:
            LOG.debug('Attempting to read %s using encoding %s.',
                      abspath, encoding)
            msg = "(encoding=%s) %s" % (encoding, str(e))
            unicode_errors.append(msg)
        else:
            break

    if len(unicode_errors) == 2:
        LOG.debug(
            'Failed to read file %s in the helm chart directory.'
            ' Ensure that it is encoded using utf-8.', abspath)
        raise chartbuilder_exceptions.FilesLoadException(
            file=abspath,
            clazz=unicode_errors[0].__class__.__name__,
            details='\n'.join(e for e in unicode_errors))

    non_template_files.append(
        Any(type_url=relpath, value=file_contents))