Example #1
    def __init__(self, http_host, http_port, http_port_retries, gcs_address,
                 redis_address, redis_password, log_dir):
        self.health_check_thread: GCSHealthCheckThread = None
        self._gcs_rpc_error_counter = 0
        # Public attributes are accessible for all head modules.
        # Workaround for issue: https://github.com/ray-project/ray/issues/7084
        self.http_host = "127.0.0.1" if http_host == "localhost" else http_host
        self.http_port = http_port
        self.http_port_retries = http_port_retries

        if use_gcs_for_bootstrap():
            assert gcs_address is not None
            self.gcs_address = gcs_address
        else:
            self.redis_address = dashboard_utils.address_tuple(redis_address)
            self.redis_password = redis_password

        self.log_dir = log_dir
        self.aioredis_client = None
        self.aiogrpc_gcs_channel = None
        self.gcs_error_subscriber = None
        self.gcs_log_subscriber = None
        self.http_session = None
        self.ip = ray.util.get_node_ip_address()
        if not use_gcs_for_bootstrap():
            ip, port = redis_address.split(":")
        else:
            ip, port = gcs_address.split(":")

        self.server = aiogrpc.server(options=(("grpc.so_reuseport", 0), ))
        grpc_ip = "127.0.0.1" if self.ip == "127.0.0.1" else "0.0.0.0"
        self.grpc_port = ray._private.tls_utils.add_port_to_grpc_server(
            self.server, f"{grpc_ip}:0")
        logger.info("Dashboard head grpc address: %s:%s", grpc_ip,
                    self.grpc_port)
Example #2
    def start_outer_server(self, port: str = None) -> grpc_aio.Server:
        outer_server = grpc_aio.server(compression=self._default_compression)
        target_host = f'[::]:{port}'
        self.add_server_port(outer_server, target_host)
        logging.debug(f"outer target host = {target_host}")

        return outer_server
Example #3
    async def _serve(self, controller):
        ''' Server task '''

        # Setup server
        self._server = aio.server()
        switchml_pb2_grpc.add_SessionServicer_to_server(self, self._server)
        switchml_pb2_grpc.add_SyncServicer_to_server(self, self._server)
        self._server.add_insecure_port('{}:{}'.format(self.ip, self.port))

        # Lock to synchronize Barrier/Broadcast in case of reset
        self.lock = threading.RLock()

        ## Barrier
        # Incrementing operation id
        self._barrier_op_id = 0
        # Worker counters and release events
        self._barrier_ctrs = {self._barrier_op_id: 0}
        self._barrier_events = {self._barrier_op_id: asyncio.Event()}

        ## Broadcast
        # Op values, bitmap and release events
        self._bcast_values = []
        self._bcast_bitmap = []
        self._bcast_events = []

        # Controller
        self.ctrl = controller

        # Start gRPC server
        await self._server.start()
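Note that start() returns as soon as the listening sockets are up, so _serve keeps the server alive only while the surrounding task runs. A minimal shutdown sketch, assuming the same self._server attribute (the _shutdown name is hypothetical):

    async def _shutdown(self, grace: float = 5.0):
        # Hypothetical companion to _serve(): stop accepting new RPCs, give
        # in-flight calls `grace` seconds to finish, then release the port.
        await self._server.stop(grace)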
Example #4
async def serve():
    server = aio.server()
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    listen_addr = "[::]:50051"
    server.add_insecure_port(listen_addr)
    logging.info("Starting server on %s", listen_addr)
    await server.start()
    await server.wait_for_termination()
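A matching asyncio client for this server, sketched under the assumption that the standard helloworld protos (helloworld_pb2 and helloworld_pb2_grpc) are importable:

async def run_client():
    # Open an asyncio channel to the server above and make one unary call.
    async with aio.insecure_channel("localhost:50051") as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        reply = await stub.SayHello(helloworld_pb2.HelloRequest(name="world"))
        logging.info("Greeter client received: %s", reply.message)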
Example #5
    def __init__(self,
                 node_ip_address,
                 redis_address,
                 dashboard_agent_port,
                 gcs_address,
                 redis_password=None,
                 temp_dir=None,
                 session_dir=None,
                 runtime_env_dir=None,
                 log_dir=None,
                 metrics_export_port=None,
                 node_manager_port=None,
                 listen_port=0,
                 object_store_name=None,
                 raylet_name=None,
                 logging_params=None):
        """Initialize the DashboardAgent object."""
        # Public attributes are accessible for all agent modules.
        self.ip = node_ip_address

        if use_gcs_for_bootstrap():
            assert gcs_address is not None
            self.gcs_address = gcs_address
        else:
            self.redis_address = dashboard_utils.address_tuple(redis_address)
            self.redis_password = redis_password
            self.aioredis_client = None
            self.gcs_address = None

        self.temp_dir = temp_dir
        self.session_dir = session_dir
        self.runtime_env_dir = runtime_env_dir
        self.log_dir = log_dir
        self.dashboard_agent_port = dashboard_agent_port
        self.metrics_export_port = metrics_export_port
        self.node_manager_port = node_manager_port
        self.listen_port = listen_port
        self.object_store_name = object_store_name
        self.raylet_name = raylet_name
        self.logging_params = logging_params
        self.node_id = os.environ["RAY_NODE_ID"]
        # TODO(edoakes): RAY_RAYLET_PID isn't properly set on Windows. This is
        # only used for fate-sharing with the raylet and we need a different
        # fate-sharing mechanism for Windows anyways.
        if sys.platform not in ["win32", "cygwin"]:
            self.ppid = int(os.environ["RAY_RAYLET_PID"])
            assert self.ppid > 0
            logger.info("Parent pid is %s", self.ppid)
        self.server = aiogrpc.server(options=(("grpc.so_reuseport", 0), ))
        grpc_ip = "127.0.0.1" if self.ip == "127.0.0.1" else "0.0.0.0"
        self.grpc_port = ray._private.tls_utils.add_port_to_grpc_server(
            self.server, f"{grpc_ip}:{self.dashboard_agent_port}")
        logger.info("Dashboard agent grpc address: %s:%s", grpc_ip,
                    self.grpc_port)
        options = (("grpc.enable_http_proxy", 0), )
        self.aiogrpc_raylet_channel = ray._private.utils.init_grpc_channel(
            f"{self.ip}:{self.node_manager_port}", options, asynchronous=True)
        self.http_session = None
Example #6
async def serve():
    server = aio.server()
    wedge_pb2_grpc.add_WedgeServicer_to_server(Wedge(), server)

    listen_addr = '[::]:50051'
    server.add_insecure_port(listen_addr)
    logging.info("Starting server on %s", listen_addr)
    await server.start()
    await server.wait_for_termination()
Example #7
    async def setUp(self):
        # Create async server
        self._server = aio.server(options=(('grpc.so_reuseport', 0), ))
        self._adhoc_handlers = AdhocGenericHandler()
        self._server.add_generic_rpc_handlers((self._adhoc_handlers, ))
        port = self._server.add_insecure_port('[::]:0')
        address = 'localhost:%d' % port
        await self._server.start()
        # Create async channel
        self._channel = aio.insecure_channel(address)
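A fixture like this usually needs the mirror-image teardown; a sketch assuming only the attributes created in setUp above:

    async def tearDown(self):
        # Close the client channel first, then stop the server immediately
        # (grace=None cancels any RPCs still in flight).
        await self._channel.close()
        await self._server.stop(None)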
Example #8
async def serve_async():
    class Greeter(helloworld_pb2_grpc.GreeterServicer):
        async def SayHello(self, request, context):
            await sleep_async(DELAY_TIME)
            return helloworld_pb2.HelloReply(message='Hello, %s!' %
                                             request.name)

    server = aio.server()
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    server.add_insecure_port('[::]:50051')
    await server.start()
    await server.wait_for_termination()
Example #9
    def _create_server(self):
        self._servicer = InferenceServicer(self._data_plane)
        self._server = aio.server(
            ThreadPoolExecutor(max_workers=self._settings.grpc_workers))

        add_GRPCInferenceServiceServicer_to_server(self._servicer,
                                                   self._server)

        self._server.add_insecure_port(
            f"{self._settings.host}:{self._settings.grpc_port}")

        return self._server
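_create_server only builds and binds the server; the caller still drives its lifecycle. A sketch of that surrounding code (the _run name is hypothetical; only self._create_server() is taken from the example):

    async def _run(self):
        # Build, start, and block until the server is stopped elsewhere.
        server = self._create_server()
        await server.start()
        await server.wait_for_termination()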
Example #10
async def main(port, log_storage="/home/tbjc1magic/log"):
    server = aio.server()
    task_manager = TaskManager()
    task_manager_thread = asyncio.to_thread(task_manager.run)
    data_collector_service_pb2_grpc.add_DataCollectorServicer_to_server(
        DataCollectorServicer(log_storage, task_manager), server)
    service_names = (
        data_collector_service_pb2.DESCRIPTOR.
        services_by_name["DataCollector"].full_name,
        reflection.SERVICE_NAME,
    )
    reflection.enable_server_reflection(service_names, server)
    server.add_insecure_port(f"[::]:{port}")
    await server.start()
    await asyncio.gather(server.wait_for_termination(), task_manager_thread)
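Since main is a coroutine, the usual entry point hands it to asyncio.run; a minimal sketch, assuming the module is executed directly and 50051 stands in for the port:

if __name__ == "__main__":
    # Run the server coroutine on a fresh event loop.
    asyncio.run(main(50051))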
Example #11
async def __run():
    global __server
    await __lock.acquire()
    __server = aio.server(
        ThreadPoolExecutor(max_workers=server_config.max_workers))
    route_pb2_grpc.add_UpdateServerRouteServicer_to_server(Greeter(), __server)
    __server.add_insecure_port(f'{server_config.host}:{server_config.port}')
    logging.info("gRPC 启动中")
    await __server.start()
    logging.info("gRPC 已启动")

    # 等待停止信号
    logging.info("启动 gRPC 进程运行阻塞锁(额外)")
    await __lock.acquire()
    logging.info("脱离 gRPC 进程运行阻塞锁(额外)")
    await __server.wait_for_termination()
    logging.info("脱离 gRPC 运行阻塞")
Example #12
async def serve_order_book(port, data_collector_address="localhost:9999"):
    server = aio.server()
    order_book_manager = OrderBookManager(
        ["bnbbtc", "ethbusd", "dogeusdt"],
        save_data=True,
        data_collector_address=data_collector_address,
        server=server,
    )
    order_book_service_pb2_grpc.add_OrderBookServicer_to_server(
        OrderBookServier(order_book_manager), server)
    service_names = (
        order_book_service_pb2.DESCRIPTOR.services_by_name["OrderBook"].
        full_name,
        reflection.SERVICE_NAME,
    )
    reflection.enable_server_reflection(service_names, server)
    server.add_insecure_port(f"[::]:{port}")
    await server.start()
    await asyncio.gather(server.wait_for_termination(),
                         order_book_manager.run())
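Because the server registers reflection, a generic client can enumerate its services without the generated stubs. A minimal sketch, assuming grpcio-reflection is installed (the reflection protos are not part of the example itself):

from grpc_reflection.v1alpha import reflection_pb2, reflection_pb2_grpc

async def list_services(address: str):
    # ServerReflectionInfo is a bidirectional stream: send one
    # list_services request and print every advertised service name.
    async with aio.insecure_channel(address) as channel:
        stub = reflection_pb2_grpc.ServerReflectionStub(channel)

        async def requests():
            yield reflection_pb2.ServerReflectionRequest(list_services="")

        async for response in stub.ServerReflectionInfo(requests()):
            for service in response.list_services_response.service:
                print(service.name)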
Example #13
    def __init__(
        self,
        http_host,
        http_port,
        http_port_retries,
        gcs_address,
        log_dir,
        temp_dir,
        session_dir,
        minimal,
    ):
        self.minimal = minimal
        self.health_check_thread: GCSHealthCheckThread = None
        self._gcs_rpc_error_counter = 0
        # Public attributes are accessible for all head modules.
        # Workaround for issue: https://github.com/ray-project/ray/issues/7084
        self.http_host = "127.0.0.1" if http_host == "localhost" else http_host
        self.http_port = http_port
        self.http_port_retries = http_port_retries

        self.gcs_address = None
        assert gcs_address is not None
        self.gcs_address = gcs_address
        self.log_dir = log_dir
        self.temp_dir = temp_dir
        self.session_dir = session_dir
        self.aiogrpc_gcs_channel = None
        self.gcs_error_subscriber = None
        self.gcs_log_subscriber = None
        self.ip = ray.util.get_node_ip_address()
        ip, port = gcs_address.split(":")

        self.server = aiogrpc.server(options=(("grpc.so_reuseport", 0), ))
        grpc_ip = "127.0.0.1" if self.ip == "127.0.0.1" else "0.0.0.0"
        self.grpc_port = ray._private.tls_utils.add_port_to_grpc_server(
            self.server, f"{grpc_ip}:0")
        logger.info("Dashboard head grpc address: %s:%s", grpc_ip,
                    self.grpc_port)
        # If the dashboard is started as non-minimal version, http server should
        # be configured to expose APIs.
        self.http_server = None
Example #14
async def serve():
    port = os.getenv('SOCKS5_PORT', '9050')
    host = os.getenv('SOCKS5_HOST', '127.0.0.1')
    connector = create_tor_connector({
        'host': host,
        'port': port,
        'username': os.getenv('SOCKS5_USERNAME', ''),
        'password': os.getenv('SOCKS5_PASSWORD', ''),
    })
    print(f"Attempting to connect to Tor on {host}:{port}")
    async with aiohttp.ClientSession(connector=connector) as session:
        print('Tor connected.')
        server = aio.server()
        scrape_pb2_grpc.add_ScraperServicer_to_server(ScraperService(session),
                                                      server)
        server.add_insecure_port("[::]:50051")
        print('Starting gRPC server on port 50051')
        await server.start()
        await server.wait_for_termination()
Example #15
    def __init__(self, address, port):
        super().__init__()
        self.server = aio.server()
        self.server.add_insecure_port(f'{address}:{port}')
Example #16
    def __init__(self):
        self.nodeid = 1
        #configuration
        # args  = config.config()
        # args.project_path = "./dataset/images"
        self.datapath = 'dataset'
        # data = ODMLoadDatasetStage(self.datapath , args, progress=5.0,
        #                                   verbose=args.verbose)

        # distance within which to include an image, in meters
        self.include_distance = 10

        #run the dataset layer
        #data.run()

        print('sfm')
        print(self.datapath)

        #opensfm configuration

        opensfm_config = opensfm_interface.setup_opensfm_config(self.datapath)

        # Remaining pipeline stages, disabled in this example, ran in order:
        # EXIF metadata extraction and camera-model recovery, feature
        # detection, feature matching, track-graph creation, incremental
        # reconstruction, undistortion, VisualSFM export, depthmap
        # computation, the MVE stages (makescene, dense reconstruction,
        # scene2pset, clean_mesh), point-cloud filtering, meshing, and MVS
        # texturing, all against hard-coded paths under
        # /home/j/ODM-master/grpc_stages/node1/.

        #https://stackoverflow.com/questions/45071567/how-to-send-custom-header-metadata-with-python-grpc

        class Servicer(sendFile_pb2_grpc.FileServiceServicer):
            def __init__(self):
                self.dataset_dir = './dataset'
                self.tmp_file_name = './dataset2/IMG_2359.JPG'

            async def upload(self, request_iterator, context):
                # The client uploads images to this node; request_iterator
                # yields the file chunk by chunk, and the chunks are written
                # to <node-id>/images/<filename> from the call metadata.
                nodeid = ''
                filename = ''
                for key, value in context.invocation_metadata():

                    if (key == 'node-id'):
                        nodeid = value
                        #check if there is a dir for the node id
                        system.mkdir_p(nodeid)
                    if key == 'filename':
                        filename = value

                    print('Received initial metadata: key=%s value=%s' %
                          (key, value))
                #print(os.path.dirname(os.path.abspath(__file__)))
                if (nodeid != '' and filename != ''):
                    save_chunks_to_file(request_iterator,
                                        nodeid + '/images/' + filename)

                    return sendFile_pb2.UploadStatus(
                        Message=" Successful ",
                        Code=sendFile_pb2.UploadStatusCode.Ok)
                else:
                    print('missing node id or filename')
                    return sendFile_pb2.UploadStatus(
                        Message=" Failure ",
                        Code=sendFile_pb2.UploadStatusCode.Failed)

            def download(self, request, context):
                if request.name:
                    return get_file_chunks(self.tmp_file_name)

            def compute(self, request, context):
                taskName = request.taskName

                #metadata use image name
                #use node id

                #opensfm compute

                # detect feature

                #

                # if(taskName == "compute_image_feature"):

                # elif(taskName == 'compute_matching_two_images'):

                # elif(taskName == 'compute_'):

                # elif(taskName == ''):

                return 0

        self.server = aio.server()

        #self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
        sendFile_pb2_grpc.add_FileServiceServicer_to_server(
            Servicer(), self.server)

        neighbor_ip = ['50001', '50002']
        self.has_neighbor = defaultdict(lambda: "Node not present.")

        # Tuple: (False until the neighbor responds, file location)
        neighbor_response = 0  # incremented as each neighbor responds
        for each in neighbor_ip:
            self.has_neighbor[each] = (False, "")

        self.leader = True
Example #17
    def __init__(
        self,
        node_ip_address,
        dashboard_agent_port,
        gcs_address,
        minimal,
        temp_dir=None,
        session_dir=None,
        runtime_env_dir=None,
        log_dir=None,
        metrics_export_port=None,
        node_manager_port=None,
        listen_port=0,
        object_store_name=None,
        raylet_name=None,
        logging_params=None,
        disable_metrics_collection: bool = False,
    ):
        """Initialize the DashboardAgent object."""
        # Public attributes are accessible for all agent modules.
        self.ip = node_ip_address
        self.minimal = minimal

        assert gcs_address is not None
        self.gcs_address = gcs_address

        self.temp_dir = temp_dir
        self.session_dir = session_dir
        self.runtime_env_dir = runtime_env_dir
        self.log_dir = log_dir
        self.dashboard_agent_port = dashboard_agent_port
        self.metrics_export_port = metrics_export_port
        self.node_manager_port = node_manager_port
        self.listen_port = listen_port
        self.object_store_name = object_store_name
        self.raylet_name = raylet_name
        self.logging_params = logging_params
        self.node_id = os.environ["RAY_NODE_ID"]
        self.metrics_collection_disabled = disable_metrics_collection
        # TODO(edoakes): RAY_RAYLET_PID isn't properly set on Windows. This is
        # only used for fate-sharing with the raylet and we need a different
        # fate-sharing mechanism for Windows anyways.
        if sys.platform not in ["win32", "cygwin"]:
            self.ppid = int(os.environ["RAY_RAYLET_PID"])
            assert self.ppid > 0
            logger.info("Parent pid is %s", self.ppid)

        # Setup raylet channel
        options = ray_constants.GLOBAL_GRPC_OPTIONS
        self.aiogrpc_raylet_channel = ray._private.utils.init_grpc_channel(
            f"{self.ip}:{self.node_manager_port}", options, asynchronous=True)

        # Setup grpc server
        self.server = aiogrpc.server(options=(("grpc.so_reuseport", 0), ))
        grpc_ip = "127.0.0.1" if self.ip == "127.0.0.1" else "0.0.0.0"
        try:
            self.grpc_port = ray._private.tls_utils.add_port_to_grpc_server(
                self.server, f"{grpc_ip}:{self.dashboard_agent_port}")
        except Exception:
            # TODO(SongGuyang): Catch the exception here because there is a
            # port conflict issue caused by the static port. We should
            # remove this once we have a better port resolution.
            logger.exception(
                "Failed to add port to grpc server. Agent will stay alive but "
                "disable the grpc service.")
            self.server = None
            self.grpc_port = None
        else:
            logger.info("Dashboard agent grpc address: %s:%s", grpc_ip,
                        self.grpc_port)

        # If the agent is started as non-minimal version, http server should
        # be configured to communicate with the dashboard in a head node.
        self.http_server = None

        # Used by the agent and sub-modules.
        # TODO(architkulkarni): Remove gcs_client once the agent exclusively uses
        # gcs_aio_client and not gcs_client.
        self.gcs_client = GcsClient(address=self.gcs_address)
        _initialize_internal_kv(self.gcs_client)
        assert _internal_kv_initialized()
        self.gcs_aio_client = GcsAioClient(address=self.gcs_address)
        self.publisher = GcsAioPublisher(address=self.gcs_address)