def create_data_channel(self, remote_grpc_port):
    """Return the cached client data channel for the port's URL, creating it on demand.

    Thread-safe: uses double-checked locking so at most one channel is
    created per URL even under concurrent first calls.

    Args:
      remote_grpc_port: object whose ``api_service_descriptor.url``
        identifies the data-plane endpoint (presumably a RemoteGrpcPort
        proto — confirm against callers).

    Returns:
      The ``GrpcClientDataChannel`` for the URL.
    """
    url = remote_grpc_port.api_service_descriptor.url
    if url not in self._data_channel_cache:
        with self._lock:
            # Re-check under the lock: another thread may have created the
            # channel between the unlocked check and lock acquisition.
            if url not in self._data_channel_cache:
                logging.info('Creating client data channel for %s', url)
                # Options to have no limits (-1) on the size of the messages
                # received or sent over the data plane. The actual buffer size
                # is controlled in a layer above.
                channel_options = [("grpc.max_receive_message_length", -1),
                                   ("grpc.max_send_message_length", -1)]
                # No dead pre-assignment needed: both branches bind
                # grpc_channel unconditionally.
                if self._credentials is None:
                    grpc_channel = GRPCChannelFactory.insecure_channel(
                        url, options=channel_options)
                else:
                    grpc_channel = GRPCChannelFactory.secure_channel(
                        url, self._credentials, options=channel_options)
                # Add workerId to the grpc channel
                grpc_channel = grpc.intercept_channel(
                    grpc_channel, WorkerIdInterceptor(self._worker_id))
                self._data_channel_cache[url] = GrpcClientDataChannel(
                    beam_fn_api_pb2_grpc.BeamFnDataStub(grpc_channel))
    return self._data_channel_cache[url]
def _grpc_data_channel_test(self, time_based_flush=False):
    """Round-trip data between an in-process gRPC data servicer and client.

    Args:
      time_based_flush: when True, configure both the servicer and the
        client with a 100ms buffer time limit so the time-based flush
        path is exercised.
    """
    # Single source of truth for the optional flush configuration; the
    # original duplicated both constructor calls across an if/else.
    flush_kwargs = (
        {'data_buffer_time_limit_ms': 100} if time_based_flush else {})
    data_servicer = data_plane.BeamFnDataServicer(**flush_kwargs)
    worker_id = 'worker_0'
    data_channel_service = data_servicer.get_conn_by_worker_id(worker_id)

    server = grpc.server(UnboundedThreadPoolExecutor())
    beam_fn_api_pb2_grpc.add_BeamFnDataServicer_to_server(
        data_servicer, server)
    test_port = server.add_insecure_port('[::]:0')
    server.start()

    grpc_channel = grpc.insecure_channel('localhost:%s' % test_port)
    # Add workerId to the grpc channel
    grpc_channel = grpc.intercept_channel(
        grpc_channel, WorkerIdInterceptor(worker_id))
    data_channel_stub = beam_fn_api_pb2_grpc.BeamFnDataStub(grpc_channel)
    data_channel_client = data_plane.GrpcClientDataChannel(
        data_channel_stub, **flush_kwargs)
    try:
        self._data_channel_test(
            data_channel_service, data_channel_client, time_based_flush)
    finally:
        data_channel_client.close()
        data_channel_service.close()
        data_channel_client.wait()
        data_channel_service.wait()
        # Shut down the in-process server (grace=None stops immediately);
        # the original leaked it across test runs.
        server.stop(None)
def create_data_channel_from_url(self, url):
    # type: (str) -> Optional[GrpcClientDataChannel]
    """Return the cached client data channel for ``url``, creating it on demand.

    Thread-safe via double-checked locking; at most one channel is ever
    created per URL.

    Args:
      url: the data-plane service endpoint address.

    Returns:
      The ``GrpcClientDataChannel`` for ``url``, or None when ``url`` is
      falsy (empty/None).
    """
    if not url:
        return None
    if url not in self._data_channel_cache:
        with self._lock:
            # Re-check under the lock: another thread may have created the
            # channel between the unlocked check and lock acquisition.
            if url not in self._data_channel_cache:
                _LOGGER.info('Creating client data channel for %s', url)
                # Options to have no limits (-1) on the size of the messages
                # received or sent over the data plane. The actual buffer size
                # is controlled in a layer above.
                channel_options = [("grpc.max_receive_message_length", -1),
                                   ("grpc.max_send_message_length", -1)]
                # No dead pre-assignment needed: both branches bind
                # grpc_channel unconditionally.
                if self._credentials is None:
                    grpc_channel = GRPCChannelFactory.insecure_channel(
                        url, options=channel_options)
                else:
                    grpc_channel = GRPCChannelFactory.secure_channel(
                        url, self._credentials, options=channel_options)
                # Add workerId to the grpc channel
                grpc_channel = grpc.intercept_channel(
                    grpc_channel, WorkerIdInterceptor(self._worker_id))
                self._data_channel_cache[url] = GrpcClientDataChannel(
                    beam_fn_api_pb2_grpc.BeamFnDataStub(grpc_channel),
                    self._data_buffer_time_limit_ms)
    return self._data_channel_cache[url]
def test_grpc_data_channel(self):
    """End-to-end exercise of the worker-id-aware gRPC data channel pair."""
    servicer = data_plane.BeamFnDataServicer()
    worker_id = 'worker_0'
    service_channel = servicer.get_conn_by_worker_id(worker_id)

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
    beam_fn_api_pb2_grpc.add_BeamFnDataServicer_to_server(servicer, server)
    port = server.add_insecure_port('[::]:0')
    server.start()

    # Tag every call on the channel with the worker id via an interceptor.
    channel = grpc.intercept_channel(
        grpc.insecure_channel('localhost:%s' % port),
        WorkerIdInterceptor(worker_id))
    client_channel = data_plane.GrpcClientDataChannel(
        beam_fn_api_pb2_grpc.BeamFnDataStub(channel))
    try:
        self._data_channel_test(service_channel, client_channel)
    finally:
        client_channel.close()
        service_channel.close()
        client_channel.wait()
        service_channel.wait()
def create_data_channel(self, remote_grpc_port):
    """Look up — or lazily create and cache — the data channel for the port's URL."""
    url = remote_grpc_port.api_service_descriptor.url
    # NOTE(review): no lock here — concurrent first calls for the same url
    # could each build a channel; confirm single-threaded use by callers.
    if url in self._data_channel_cache:
        return self._data_channel_cache[url]
    logging.info('Creating channel for %s', url)
    # Options to have no limits (-1) on the size of the messages
    # received or sent over the data plane. The actual buffer size is
    # controlled in a layer above.
    no_limit_options = [("grpc.max_receive_message_length", -1),
                        ("grpc.max_send_message_length", -1)]
    grpc_channel = grpc.insecure_channel(url, options=no_limit_options)
    channel = GrpcClientDataChannel(
        beam_fn_api_pb2_grpc.BeamFnDataStub(grpc_channel))
    self._data_channel_cache[url] = channel
    return channel
def test_grpc_data_channel(self):
    """End-to-end exercise of the server/client gRPC data channel pair."""
    service_channel = data_plane.GrpcServerDataChannel()
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
    beam_fn_api_pb2_grpc.add_BeamFnDataServicer_to_server(
        service_channel, server)
    port = server.add_insecure_port('[::]:0')
    server.start()

    stub = beam_fn_api_pb2_grpc.BeamFnDataStub(
        grpc.insecure_channel('localhost:%s' % port))
    client_channel = data_plane.GrpcClientDataChannel(stub)
    try:
        self._data_channel_test(service_channel, client_channel)
    finally:
        client_channel.close()
        service_channel.close()
        client_channel.wait()
        service_channel.wait()