def create_cluster(self,
                   cluster: Union[Dict, Cluster],
                   project_id: str,
                   retry: Retry = DEFAULT,
                   timeout: float = DEFAULT) -> str:
    """
    Creates a cluster, consisting of the specified number and type of Google
    Compute Engine instances.

    :param cluster: A Cluster protobuf or dict. If a dict is provided, it must
        be of the same form as the protobuf message
        :class:`google.cloud.container_v1.types.Cluster`
    :type cluster: dict or google.cloud.container_v1.types.Cluster
    :param project_id: Google Cloud project ID
    :type project_id: str
    :param retry: A retry object (``google.api_core.retry.Retry``) used to
        retry requests. If None is specified, requests will not be retried.
    :type retry: google.api_core.retry.Retry
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. Note that if retry is specified, the timeout applies to each
        individual attempt.
    :type timeout: float
    :return: The full url to the new, or existing, cluster
    :raises:
        ParseError: On JSON parsing problems when trying to convert dict
        AirflowException: cluster is not dict type nor Cluster proto type
    """
    if isinstance(cluster, dict):
        cluster_proto = Cluster()
        cluster = ParseDict(cluster, cluster_proto)
    elif not isinstance(cluster, Cluster):
        raise AirflowException(
            "cluster is not instance of Cluster proto or python dict")

    self._append_label(cluster, 'airflow-version', 'v' + version.version)

    self.log.info("Creating (project_id=%s, zone=%s, cluster_name=%s)",
                  project_id, self.location, cluster.name)
    try:
        resource = self.get_conn().create_cluster(project_id=project_id,
                                                  zone=self.location,
                                                  cluster=cluster,
                                                  retry=retry,
                                                  timeout=timeout)
        resource = self.wait_for_operation(resource)
        return resource.target_link
    except AlreadyExists as error:
        self.log.info('Assuming Success: %s', error.message)
        return self.get_cluster(name=cluster.name, project_id=project_id)
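# A minimal usage sketch for the hook method above. The hook class name,
# constructor arguments, and all values are assumptions for illustration,
# not confirmed by the source.
hook = GKEClusterHook(gcp_conn_id='google_cloud_default',
                      location='us-central1-a')
cluster_url = hook.create_cluster(
    cluster={'name': 'example-cluster', 'initial_node_count': 1},
    project_id='example-project',
)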
def test_put_successfully(self):
    config = {
        'participants': [{
            'name': 'party_leader',
            'url': '127.0.0.1:5000',
            'domain_name': 'fl-leader.com'
        }],
        'variables': [{
            'name': 'namespace',
            'value': 'leader'
        }, {
            'name': 'basic_envs',
            'value': '{}'
        }, {
            'name': 'storage_root_dir',
            'value': '/'
        }, {
            'name': 'EGRESS_URL',
            'value': '127.0.0.1:1991'
        }]
    }
    project = Project(
        name='test',
        config=ParseDict(config, project_pb2.Project()).SerializeToString())
    db.session.add(project)
    workflow = Workflow(
        name='test-workflow',
        project_id=1,
        state=WorkflowState.NEW,
        transaction_state=TransactionState.PARTICIPANT_PREPARE,
        target_state=WorkflowState.READY)
    db.session.add(workflow)
    db.session.commit()
    db.session.refresh(workflow)

    response = self.put_helper(f'/api/v2/workflows/{workflow.id}',
                               data={
                                   'forkable': True,
                                   'config': {'group_alias': 'test-template'},
                                   'comment': 'test comment'
                               })
    self.assertEqual(response.status_code, HTTPStatus.OK)

    updated_workflow = Workflow.query.get(workflow.id)
    self.assertIsNotNone(updated_workflow.config)
    self.assertTrue(updated_workflow.forkable)
    self.assertEqual(updated_workflow.comment, 'test comment')
    self.assertEqual(updated_workflow.target_state, WorkflowState.READY)
def test_stackdriver_upsert_channel(self, mock_channel_client,
                                    mock_get_creds_and_project_id):
    hook = stackdriver.StackdriverHook()
    existing_notification_channel = ParseDict(
        TEST_NOTIFICATION_CHANNEL_1,
        monitoring_v3.types.notification_pb2.NotificationChannel())
    notification_channel_to_be_created = ParseDict(
        TEST_NOTIFICATION_CHANNEL_2,
        monitoring_v3.types.notification_pb2.NotificationChannel())
    mock_channel_client.return_value.list_notification_channels.return_value = [
        existing_notification_channel
    ]

    hook.upsert_channel(
        channels=json.dumps({"channels": [TEST_NOTIFICATION_CHANNEL_1,
                                          TEST_NOTIFICATION_CHANNEL_2]}),
        project_id=PROJECT_ID,
    )

    mock_channel_client.return_value.list_notification_channels.assert_called_once_with(
        name='projects/{project}'.format(project=PROJECT_ID),
        filter_=None,
        order_by=None,
        page_size=None,
        retry=DEFAULT,
        timeout=DEFAULT,
        metadata=None,
    )
    mock_channel_client.return_value.update_notification_channel.assert_called_once_with(
        notification_channel=existing_notification_channel,
        retry=DEFAULT,
        timeout=DEFAULT,
        metadata=None
    )
    notification_channel_to_be_created.ClearField('name')
    mock_channel_client.return_value.create_notification_channel.assert_called_once_with(
        name='projects/{project}'.format(project=PROJECT_ID),
        notification_channel=notification_channel_to_be_created,
        retry=DEFAULT,
        timeout=DEFAULT,
        metadata=None
    )
def loadModel(stub, return_bid):
    load_info = {
        'bid': return_bid,
        'model': {
            'implhash': "226a7354795692913f24bee21b0cd387",
            'version': "1",
        },
        'encrypted': 0,
        'a64key': "",
        'pvtkey': "",
    }
    response = stub.ReloadModelOnBackend(
        ParseDict(load_info, be_pb2.LoadRequest()))
    print("grpc.backend.reloadModelOnBackend >>>", response.code, response.msg)
def createAndLoadModelV2(stub):
    load_info = {
        'backend': {'impl': "tensorflow.frozen"},
        'model': {
            'fullhash': "226a7354795692913f24bee21b0cd387",
        },
        'encrypted': 0,
        'a64key': "",
        'pvtkey': "",
    }
    response = stub.CreateAndLoadModelV2(
        ParseDict(load_info, be_pb2.FullLoadRequestV2()))
    print("grpc.backend.createAndLoadModel >>>", response.code, response.msg)
    return response.msg
def submit_query(account_id, transaction):
    print("start iroha query")
    # iroha = Iroha(account_id)
    new_tx = queries_pb2.Query()
    print(f"new query {new_tx}")
    try:
        transaction = ParseDict(transaction, new_tx)
        print(transaction)
        iroha_host_addr = "127.0.0.1"
        iroha_port = "50051"
        return send_query_print_status_and_return_result(
            iroha_host_addr, iroha_port, transaction)
    except Exception as e:
        print(e)
def altura_da_pessoa(skeletons):
    # "altura_da_pessoa" means "height of the person": reads the z-coordinate
    # of keypoint 1 from the first detected skeleton.
    skeletons_pb = ParseDict(skeletons, ObjectAnnotations())
    altura_da_pessoa = None
    for skeleton in skeletons_pb.objects:
        parts = {}
        for part in skeleton.keypoints:
            parts[part.id] = (part.position.x, part.position.y,
                              part.position.z)
        # Use the z-coordinate of keypoint 1 if it was detected; otherwise 0.
        if 1 in parts:
            altura_da_pessoa = parts[1][2]
        else:
            altura_da_pessoa = 0
        break
    return altura_da_pessoa
def _generate_messages(self, count):
    return [
        ParseDict(
            {
                "ack_id": "%s" % i,
                "message": {
                    "data": 'Message {}'.format(i).encode('utf8'),
                    "attributes": {"type": "generated message"},
                },
            },
            ReceivedMessage(),
        )
        for i in range(1, count + 1)
    ]
def _value_to_pb(value, proto_type):
    """
    Convert a value to protobuf. e.g. BoolValue, Int32Value.

    Args:
        value (dict): A dict that needs to be converted to protobuf.
        proto_type (str): The type of the Protobuf.

    Returns:
        An instance of the specified protobuf.
    """
    data_type_pb = getattr(google_dot_protobuf_dot_wrappers__pb2, proto_type)()
    ParseDict(value, data_type_pb)
    return data_type_pb
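# A minimal sketch of how the wrapper helper above could be exercised. Note
# that well-known wrapper types use their bare scalar as the JSON form, so
# ParseDict accepts a scalar here despite the docstring saying dict; the
# calls and expected results below are illustrative assumptions.
flag_pb = _value_to_pb(True, 'BoolValue')    # google.protobuf.BoolValue(value=True)
count_pb = _value_to_pb(42, 'Int32Value')    # google.protobuf.Int32Value(value=42)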
def _dict_mapping_to_pb(mapping, proto_type):
    """
    Convert a dict to protobuf.

    Args:
        mapping (dict): A dict that needs to be converted to protobuf.
        proto_type (str): The type of the Protobuf.

    Returns:
        An instance of the specified protobuf.
    """
    converted_pb = getattr(trace_pb2, proto_type)()
    ParseDict(mapping, converted_pb)
    return converted_pb
def inferLocal(inf_stub, return_bid, r):
    auuid = str(uuid.uuid4())
    infer = {
        'bid': return_bid,
        'uuid': auuid,
        'path': test_image,
    }
    response = inf_stub.InferenceLocal(
        ParseDict(infer, inf_pb2.InferRequest()))
    print("grpc.inference.inferenceLocal >>>", response.code, response.msg)
    # Busy-wait until the result for this request's uuid appears in redis.
    v = None
    while v is None:
        v = r.get(auuid)
    print(v)
def _generate_messages(self, count) -> List[ReceivedMessage]:
    return [
        ParseDict(
            {
                "ack_id": str(i),
                "message": {
                    "data": f'Message {i}'.encode('utf8'),
                    "attributes": {"type": "generated message"},
                },
            },
            ReceivedMessage(),
        )
        for i in range(1, count + 1)
    ]
def submit_tx(account_id, transaction):
    print("start iroha")
    iroha = Iroha(account_id)
    new_tx = iroha.transaction([])
    iroha_host_addr = "127.0.0.1"
    iroha_port = "50051"
    try:
        transaction = ParseDict(transaction, new_tx)
        print(transaction)
        result = send_transaction_return_result(iroha_host_addr, iroha_port,
                                                transaction)
        return result
    except Exception as e:
        print(e)
def ReloadModelOnBackend(self, request, context):
    try:
        ret = backend.reloadModelOnBackend(MessageToDict(request))
        return ParseDict(ret, c_pb2.ResultReply())
    except ReloadModelOnBackendError as e:
        return error_reply.error_msg(c_pb2, ReloadModelOnBackendError,
                                     exception=e)
    except Exception as e:
        logging.exception(e)
        return error_reply.error_msg(
            c_pb2, RunTimeException,
            msg="failed to (re)load model on backend: {}".format(repr(e)))
def to_tflow(graph):
    import tensorflow as tf

    name_lookup = lambda n: graph[n][0]['label'] if n in graph else str(n)
    wrap = lambda arg: ({
        'tensor': MessageToDict(tf.make_tensor_proto(arg))
    } if isinstance(arg, np.ndarray) else arg)

    nodes = [{
        'name': attr['label'],
        'op': attr['type'],
        'attr': {k: wrap(v) for (k, v) in attr['params'].items()},
        'input': [name_lookup(i) for i in attr['inputs']]
    } for name, attr in graph.items()]

    return ParseDict({'node': nodes, 'library': {}}, tf.GraphDef())
def test_from_pb_w_unknown_metadata(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.json_format import ParseDict
    from google.protobuf.struct_pb2 import Struct
    from google.cloud._testing import _Monkey
    from google.cloud import operation as MUT

    type_url = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,)
    client = _Client()
    meta = ParseDict({'foo': 'Bar'}, Struct())
    metadata_pb = Any(type_url=type_url, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(name=self.OPERATION_NAME,
                                            metadata=metadata_pb)
    klass = self._get_target_class()

    with _Monkey(MUT, _TYPE_URL_MAP={type_url: Struct}):
        operation = klass.from_pb(operation_pb, client)

    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertIs(operation.client, client)
    self.assertEqual(operation.metadata, meta)
    self.assertEqual(operation.caller_metadata, {})
def hello_test_business(self, msg):
    """
    :param msg: request msg
    :return:
    """
    proto_data = helloworld_pb2.HelloMessage()
    # ParseDict(msg, proto_data)  # format msg into the protobuf message
    response = self.client.hello_test.future(proto_data)  # send data to the server
    # response = self.client.hello_test(proto_data)  # send data to the server
    response.add_done_callback(
        self.hello_callback)  # callback: required when sending asynchronously with [future]
    return response
def setUp(self):
    super().setUp()
    with open(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         'test.tar.gz'), 'rb') as file:
        self.TEST_CERTIFICATES = str(b64encode(file.read()), encoding='utf-8')
    self.default_project = Project()
    self.default_project.name = 'test-self.default_project'
    self.default_project.set_config(
        ParseDict(
            {
                'participants': [{
                    'name': 'test-participant',
                    'domain_name': 'fl-test.com',
                    'url': '127.0.0.1:32443'
                }],
                'variables': [{
                    'name': 'test',
                    'value': 'test'
                }]
            }, ProjectProto()))
    self.default_project.set_certificate(
        ParseDict(
            {
                'domain_name_to_cert': {
                    'fl-test.com': {
                        'certs': parse_certificates(self.TEST_CERTIFICATES)
                    }
                }
            }, CertificateStorage()))
    self.default_project.comment = 'test comment'
    db.session.add(self.default_project)
    workflow = Workflow(name='workflow_key_get1', project_id=1)
    db.session.add(workflow)
    db.session.commit()
def request_base_model(self):
    # Update the request to query for 1 Keras model
    params = copy.deepcopy(self.experiment_params)
    params["evolution"] = {"population_size": 1}
    params["LEAF"]["representation"] = "KerasNN"

    # Prepare a request for next generation
    request_params = {
        'version': self.version,
        'experiment_id': self.experiment_id
    }
    request = ParseDict(request_params, PopulationRequest())
    request.config = self.extension_packaging.to_extension_bytes(params)

    # Ask for the base model
    response = self._next_population_with_retry(request)

    # Convert the received bytes to a Keras model
    model_bytes = response.population[0].interpretation
    model_file = io.BytesIO(model_bytes)
    keras_model = load_model(model_file)

    # Return the base model
    return keras_model
def InitializeBackend(self, request, context):
    try:
        ret = backend.initializeBackend(MessageToDict(request),
                                        passby_model=None)
        return ParseDict(ret, c_pb2.ResultReply())
    except CreateAndLoadModelError as e:
        return error_reply.error_msg(c_pb2, CreateAndLoadModelError,
                                     exception=e)
    except Exception as e:
        logging.exception(e)
        return error_reply.error_msg(
            c_pb2, RunTimeException,
            msg="failed to initialize backend: {}".format(repr(e)))
def create_lineage_info(train_event_dict, eval_event_dict, dataset_event_dict):
    """
    Create parsed lineage info tuple.

    Args:
        train_event_dict (Union[dict, None]): The dict of train event.
        eval_event_dict (Union[dict, None]): The dict of evaluation event.
        dataset_event_dict (Union[dict, None]): The dict of dataset graph event.

    Returns:
        namedtuple, parsed lineage info.
    """
    if train_event_dict is not None:
        train_event = summary_pb2.LineageEvent()
        ParseDict(train_event_dict, train_event)
    else:
        train_event = None

    if eval_event_dict is not None:
        eval_event = summary_pb2.LineageEvent()
        ParseDict(eval_event_dict, eval_event)
    else:
        eval_event = None

    if dataset_event_dict is not None:
        dataset_event = summary_pb2.LineageEvent()
        ParseDict(dataset_event_dict, dataset_event)
    else:
        dataset_event = None

    lineage_info = LineageInfo(
        train_lineage=train_event,
        eval_lineage=eval_event,
        dataset_graph=dataset_event,
    )
    return lineage_info
def ListListings(self, page, size):
    res = self.es.search(index=self.index_name,
                         doc_type=self.doc_type,
                         body={
                             'query': {
                                 'match_all': {}
                             },
                         },
                         size=size,
                         from_=page)
    print("Got %d Hits:" % res['hits']['total'])
    for hit in res['hits']['hits']:
        print(hit["_source"])
        yield ParseDict(hit["_source"], listings.Listing())
def send_information(skeletons):
    skeletons_pb = ParseDict(skeletons, ObjectAnnotations())
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    channel.queue_declare(queue='Receive_information')
    for skeleton in skeletons_pb.objects:
        parts = {}
        for part in skeleton.keypoints:
            parts[part.id] = (part.position.x, part.position.y,
                              part.position.z)
        channel.basic_publish(
            exchange='',
            routing_key='Receive_information',
            body=json.dumps({'dict': skeletons}).encode('utf-8'))
    # channel.basic_consume(queue='Receive_information', on_message_callback=callback, auto_ack=True)
    # channel.start_consuming()
    # print("Sent")
    connection.close()
def report_error_event(self, error_report):
    """Uses the GAX client to report the error.

    :type error_report: dict
    :param error_report: payload of the error report formatted according to
        https://cloud.google.com/error-reporting/docs/formatting-error-messages
        This object should be built using
        :meth:`~google.cloud.error_reporting.client._build_error_report`
    """
    project_name = self._gax_api.project_path(self._project)
    error_report_payload = report_errors_service_pb2.ReportedErrorEvent()
    ParseDict(error_report, error_report_payload)
    self._gax_api.report_error_event(project_name, error_report_payload)
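# A sketch of the payload shape the method above expects, following the
# linked formatting guide; all values and the 'client' instance are
# illustrative assumptions.
error_report = {
    'message': 'ZeroDivisionError: division by zero',
    'serviceContext': {'service': 'example-service', 'version': '1.0.0'},
    'context': {
        'reportLocation': {
            'filePath': 'app/main.py',
            'lineNumber': 42,
            'functionName': 'divide',
        }
    },
}
client.report_error_event(error_report)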
def createAndLoadModel(stub):
    load_info = {
        'backend': {'impl': "tensorflow.frozen"},
        'model': {
            'implhash': "226a7354795692913f24bee21b0cd387",
            'version': "1",
        },
        'encrypted': 1,
        'a64key': "gWUXbcs1LnW8/RZLkykyGvbncsql1ok4HEvtbI8GW3AwD+V80A+ugF9wWFztuTdtQzvso0TwJR4aaglWoorTbSwlfWpmRST0PGC0OG0xBUkZtBhoJsoctC+nav/R8UozqFPhUrHydjgZfe3RfclOPXqk7krll9SsyZsTEy/M3z/knUKWkUJImZBmOYSWUxbZEpka+Mhocuvnq/HqUaTe4sIPTaaN/tvwtAj4ZMZZz3n9+JNEleUeO/A4cJXRlCFWyrWDxcjkYmH4oIfQGFYrQGKh2mzKRZOSimncoTOqkpJh/IUIJMczqmK0L7OZ3xMIVKQIJ3XVFIPDWkOvcHNzeA==",
        'pvtkey': "/home/ubuntu/jk-97/aiserving/src/serving/core/private.pem",
    }
    response = stub.CreateAndLoadModel(
        ParseDict(load_info, be_pb2.FullLoadRequest()))
    print("grpc.backend.createAndLoadModel >>>", response.code, response.msg)
    return response.msg
def _call_endpoint(self, api, json_body):
    endpoint, method = _METHOD_TO_INFO[api]
    response_proto = api.Response()
    # Convert json string to json dictionary, to pass to requests
    if json_body:
        json_body = json.loads(json_body)
    response = http_request(endpoint=endpoint, method=method,
                            json=json_body, **self.http_request_kwargs)
    js_dict = json.loads(response.text)
    if 'error_code' in js_dict:
        raise RestException(js_dict)
    ParseDict(js_dict=js_dict, message=response_proto)
    return response_proto
def write_user_protobuf(fd, user):
    if isinstance(user, User):
        # The easy way out
        return fd.write(user.SerializeToString())
    if isinstance(user, dict):
        # Parse the dict into a message, then recurse on the parsed message
        # (recursing on the original dict would loop forever).
        new_user = User()
        ParseDict(user, new_user, ignore_unknown_fields=True)
        return write_user_protobuf(fd, new_user)
    # Mostly won't happen but support it nevertheless
    new_user = User()
    for field in User.DESCRIPTOR.fields_by_name:
        if hasattr(user, field):
            setattr(new_user, field, getattr(user, field))
    return fd.write(new_user.SerializeToString())
def _span_attrs_to_pb(span_attr, proto_type):
    """
    Convert a span attribute dict to protobuf, including Links, Attributes,
    TimeEvents.

    Args:
        span_attr (dict): A dict that needs to be converted to protobuf.
        proto_type (str): The type of the Protobuf.

    Returns:
        An instance of the specified protobuf.
    """
    attr_pb = getattr(trace_pb2.Span, proto_type)()
    ParseDict(span_attr, attr_pb)
    return attr_pb
def construct_immediate_container(dir, files):
    objs = []
    all_data = bytes()
    for file in files:
        with open(f'{dir}/{file}', 'rb') as f:
            data = f.read()
        l = len(data)
        all_data += data
        objs.append({'name': file, 'length': l})
    message = io_pb2.Immediate.Container()
    ParseDict(
        {
            'object': objs,
            'content_some': base64.b64encode(all_data).decode()
        }, message)
    return MessageToJson(message)
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and
    the keys expected in the JSON API.
    """
    entry_pb = LogEntryPB.pb(LogEntryPB())
    # NOTE: We assume ``mapping`` was created in ``Batch.commit``
    #       or ``Logger._make_entry_resource``. In either case, if
    #       the ``protoPayload`` key is present, we assume that the
    #       type URL is registered with ``google.protobuf`` and will
    #       not cause any issues in the JSON->protobuf conversion
    #       of the corresponding ``proto_payload`` in the log entry
    #       (it is an ``Any`` field).
    ParseDict(mapping, entry_pb)
    return LogEntryPB(entry_pb)
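# A minimal sketch of a mapping the helper above can parse, using the
# camelCase keys of the LogEntry JSON representation; the project, log name,
# and payload are illustrative assumptions.
entry = _log_entry_mapping_to_pb({
    'logName': 'projects/example-project/logs/app',
    'textPayload': 'hello world',
    'severity': 'INFO',
    'resource': {'type': 'global'},
})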