Example #1
def setUp(self):
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    self._loop = loop
    self._proxy_client = ControlProxyHttpClient()
    ServiceRegistry._REGISTRY = {
        "services": {
            "mobilityd":
            {
                "ip_address": "0.0.0.0",
                "port": 3456,
            },
        },
    }
    ServiceRegistry.add_service('test', '0.0.0.0', 0)
    self._req_body = GatewayRequest(
        gwId="test id", authority='mobilityd',
        path='/magma.MobilityService'
             '/ListAddedIPv4Blocks',
        headers={
            'te': 'trailers',
            'content-type':
                'application/grpc',
            'user-agent':
                'grpc-python/1.4.0',
            'grpc-accept-encoding':
                'identity',
        },
        payload=bytes.fromhex('0000000000'),
    )
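
The setUp above builds a fresh event loop for each test but never closes it. A minimal tearDown sketch to pair with it (an assumption, not part of the original snippet) keeps tests from leaking loops:

def tearDown(self):
    # close the per-test loop unless the test body already closed it
    # (Example #4 calls self._loop.close() at the end of the test)
    if not self._loop.is_closed():
        self._loop.close()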
Example #2
def __init__(self, loop, response_timeout):
    threading.Thread.__init__(self)
    # a synchronized queue
    self._response_queue = queue.Queue()
    self._loop = loop
    asyncio.set_event_loop(self._loop)
    # seconds to wait for an actual SyncRPCResponse to become available
    # before sending out a heartBeat
    self._response_timeout = response_timeout
    self._proxy_client = ControlProxyHttpClient()
    self.daemon = True
    self._current_delay = 0
    self._last_conn_time = 0
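
The loop passed to this constructor has to be running somewhere for the client's proxied requests to make progress (forward_request in Example #5 schedules coroutines onto it with run_coroutine_threadsafe). A minimal usage sketch, assuming SyncRPCClient is importable and a 5-second response timeout is acceptable:

import asyncio

loop = asyncio.new_event_loop()
client = SyncRPCClient(loop, response_timeout=5)  # timeout value is an assumption
client.start()      # threading.Thread.start(): run() executes in a daemon thread
loop.run_forever()  # keep the loop alive so scheduled coroutines can run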
Example #3
def __init__(self, loop, response_timeout: int,
             print_grpc_payload: bool = False):
    threading.Thread.__init__(self)
    # a synchronized queue
    self._response_queue = queue.Queue()
    self._loop = loop
    asyncio.set_event_loop(self._loop)
    # seconds to wait for an actual SyncRPCResponse to become available
    # before sending out a heartBeat
    self._response_timeout = response_timeout
    self._proxy_client = ControlProxyHttpClient()
    self.daemon = True
    self._current_delay = 0
    self._last_conn_time = 0
    self._conn_closed_table = {}  # mapping of req id -> conn closed
    self._print_grpc_payload = print_grpc_payload
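
The response_timeout stored here drives the heartbeat behaviour in send_sync_rpc_response (Example #5): the thread blocks on the synchronized queue and falls back to a heartbeat when nothing arrives in time. A standalone sketch of that stdlib pattern:

import queue

responses = queue.Queue()
try:
    # block for at most response_timeout seconds (2 here, arbitrarily)
    resp = responses.get(block=True, timeout=2)
except queue.Empty:
    print("no SyncRPCResponse in time -> yield a heartbeat instead")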
Example #4
class ProxyClientTests(unittest.TestCase):
    """
    Tests for the ProxyClient.
    """

    def setUp(self):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        self._loop = loop
        self._proxy_client = ControlProxyHttpClient()
        ServiceRegistry._REGISTRY = {"services": {"mobilityd":
                                                  {"ip_address": "0.0.0.0",
                                                   "port": 3456}}
                                     }
        ServiceRegistry.add_service('test', '0.0.0.0', 0)
        self._req_body = GatewayRequest(gwId="test id", authority='mobilityd',
                                        path='/magma.MobilityService'
                                             '/ListAddedIPv4Blocks',
                                        headers={'te': 'trailers',
                                                 'content-type':
                                                     'application/grpc',
                                                 'user-agent':
                                                     'grpc-python/1.4.0',
                                                 'grpc-accept-encoding':
                                                     'identity'},
                                        payload=bytes.fromhex('0000000000'))

    @unittest.mock.patch('aioh2.open_connection')
    def test_http_client(self, mock_conn):
        req_body = GatewayRequest(gwId="test id", authority='mobilityd',
                                  path='/magma.MobilityService'
                                       '/ListAddedIPv4Blocks',
                                  headers={'te': 'trailers',
                                           'content-type': 'application/grpc',
                                           'user-agent': 'grpc-python/1.4.0',
                                           'grpc-accept-encoding': 'identity'},
                                  payload=bytes.fromhex('0000000000'))
        expected_payload = \
            b'\x00\x00\x00\x00\n\n\x08\x12\x04\xc0\xa8\x80\x00\x18\x18'
        expected_header = [(':status', '200'),
                           ('content-type', 'application/grpc')]
        expected_trailers = [('grpc-status', '0'), ('grpc-message', '')]

        mock_conn.side_effect = asyncio.coroutine(
            unittest.mock.MagicMock(
                return_value=MockClient(expected_payload, expected_header,
                                        expected_trailers, req_body)))
        future = asyncio.ensure_future(
            self._proxy_client.send(self._req_body))

        try:
            self._loop.run_until_complete(future)
            res = future.result()
            self.assertEqual(res.status,
                             '200')
            self.assertEqual(res.payload, expected_payload)
        except Exception as e:  # pylint: disable=broad-except
            self.fail(e)
        self._loop.close()
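
The test above wraps its MagicMock in asyncio.coroutine so the patched aioh2.open_connection returns an awaitable. asyncio.coroutine was deprecated in Python 3.8 and removed in 3.11; a hedged alternative on newer interpreters is a small async wrapper (make_mock_conn below is illustrative, not part of the original test):

def make_mock_conn(mock_client):
    async def _open_connection(*args, **kwargs):
        # awaited by the proxy client in place of a real aioh2 connection
        return mock_client
    return _open_connection

# inside the test:
# mock_conn.side_effect = make_mock_conn(
#     MockClient(expected_payload, expected_header, expected_trailers, req_body))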
Example #5
class SyncRPCClient(threading.Thread):
    """
    SyncRPCClient runs in its own thread and opens a bidirectional stream
    with the cloud.
    """

    RETRY_MAX_DELAY_SECS = 10  # seconds

    def __init__(self, loop, response_timeout: int):
        threading.Thread.__init__(self)
        # a synchronized queue
        self._response_queue = queue.Queue()
        self._loop = loop
        asyncio.set_event_loop(self._loop)
        # seconds to wait for an actual SyncRPCResponse to become available
        # before sending out a heartBeat
        self._response_timeout = response_timeout
        self._proxy_client = ControlProxyHttpClient()
        self.daemon = True
        self._current_delay = 0
        self._last_conn_time = 0
        self._conn_closed_table = {}  # mapping of req id -> conn closed

    def run(self):
        """
        This is executed when the thread is started. It gets a connection to
        the cloud dispatcher and calls its bidirectional streaming rpc
        EstablishSyncRPCStream(). process_streams should never return; if it
        does, the exception is logged and a new connection to the dispatcher
        is attempted after a backoff delay capped at RETRY_MAX_DELAY_SECS.
        """
        while True:
            try:
                start_time = time.time()
                chan = ServiceRegistry.get_rpc_channel('dispatcher',
                                                       ServiceRegistry.CLOUD)
                client = SyncRPCServiceStub(chan)
                self._set_connect_time()
                self.process_streams(client)
            except grpc.RpcError as err:
                if is_grpc_error_retryable(err):
                    logging.error(
                        "[SyncRPC] Transient gRPC error, retrying: %s",
                        err.details())
                    self._retry_connect_sleep()
                    continue
                else:
                    logging.error(
                        "[SyncRPC] gRPC error: %s, reconnecting to "
                        "cloud.", err.details())
                    self._cleanup_and_reconnect()
            except Exception as exp:  # pylint: disable=broad-except
                conn_time = time.time() - start_time
                logging.error("[SyncRPC] Error after %ds: %s", conn_time, exp)
                self._cleanup_and_reconnect()

    def process_streams(self, client: SyncRPCServiceStub) -> None:
        """
        Calls rpc function EstablishSyncRPCStream on the client to establish
        a stream with dispatcher in the cloud, processes all requests from
        the stream, and writes all responses to the stream.
        Args:
            client: a grpc client to dispatcher in the cloud.
        Returns:
            Should only return when an exception is encountered.
        """

        # call to bidirectional streaming grpc takes in an iterator,
        # and returns an iterator
        sync_rpc_requests = client.EstablishSyncRPCStream(
            self.send_sync_rpc_response())
        magmad_events.established_sync_rpc_stream()
        # forward incoming requests from cloud to control_proxy
        self.forward_requests(sync_rpc_requests)

    def send_sync_rpc_response(self):
        """
        Retrieve SyncRPCResponse from the queue. If no response is available,
        block for at most response_timeout seconds, and send a heartBeat on
        timeout.
        Returns: A generator of SyncRPCResponse
        """
        while True:
            try:
                resp = self._response_queue.get(block=True,
                                                timeout=self._response_timeout)
                yield resp
            except queue.Empty:
                # response_queue is empty: send a heartbeat. The generator
                # cannot tell how long the stream has been idle, so a
                # heartbeat is sent whenever the queue times out.
                logging.debug("[SyncRPC] Sending heartbeat")
                yield SyncRPCResponse(heartBeat=True)

    def forward_requests(self,
                         sync_rpc_requests: List[SyncRPCRequest]) -> None:
        """
        Send requests in the sync_rpc_requests iterator.
        Args:
            sync_rpc_requests: an iterator of SyncRPCRequest from cloud

        Returns: Should only return when the server shuts the stream (reaches
        the end of the iterator sync_rpc_requests, or encounters an error)

        """
        logging.info("[SyncRPC] Waiting for requests")
        while True:
            try:
                req = next(sync_rpc_requests)
                self.forward_request(req)
            except grpc.RpcError as err:
                logging.error("[SyncRPC] Failing to forward request, err: %s",
                              err.details())
                raise err

    def forward_request(self, request: SyncRPCRequest) -> None:
        if request.heartBeat:
            logging.info("[SyncRPC] Got heartBeat from cloud")
            return

        if request.connClosed:
            logging.debug("[SyncRPC] Got connClosed from cloud")
            self._conn_closed_table[request.reqId] = True
            return

        logging.debug("[SyncRPC] Got a request")
        asyncio.run_coroutine_threadsafe(
            self._proxy_client.send(request.reqBody, request.reqId,
                                    self._response_queue,
                                    self._conn_closed_table), self._loop)

    def _retry_connect_sleep(self) -> None:
        """
        Sleep for the current delay plus up to one second of random jitter,
        then double the current delay for the next retry, keeping it between
        1 second and RETRY_MAX_DELAY_SECS.
        """
        sleep_time = self._current_delay + (random.randint(0, 1000) / 1000)
        self._current_delay = min(2 * self._current_delay,
                                  self.RETRY_MAX_DELAY_SECS)
        self._current_delay = max(self._current_delay, 1)
        time.sleep(sleep_time)

    def _set_connect_time(self) -> None:
        logging.info("[SyncRPC] Opening stream to cloud")
        self._current_delay = 0
        self._last_conn_time = time.time()

    def _cleanup_and_reconnect(self):
        """
        If the connection is terminated, wait for a period of time
        before connecting back to the cloud. Also clear the conn
        closed table since cloud may reuse req IDs, and clear
        current proxy client connections
        """
        self._conn_closed_table.clear()
        self._proxy_client.close_all_connections()
        self._retry_connect_sleep()
        magmad_events.disconnected_sync_rpc_stream()
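
A standalone trace of the jittered backoff implemented by _retry_connect_sleep above: the slept delay grows roughly 0, 1, 2, 4, 8, 10, 10, ... seconds, each plus up to one second of jitter, and never exceeds RETRY_MAX_DELAY_SECS:

import random

RETRY_MAX_DELAY_SECS = 10
current_delay = 0
for attempt in range(7):
    sleep_time = current_delay + (random.randint(0, 1000) / 1000)
    print("attempt %d: would sleep ~%.3fs" % (attempt, sleep_time))
    current_delay = max(min(2 * current_delay, RETRY_MAX_DELAY_SECS), 1)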
Example #6
class ProxyClientTests(unittest.TestCase):
    """
    Tests for the ProxyClient.
    """

    def setUp(self):
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        self._loop = loop
        self._proxy_client = ControlProxyHttpClient()
        ServiceRegistry._REGISTRY = {"services": {"mobilityd":
                                                  {"ip_address": "0.0.0.0",
                                                   "port": 3456}}
                                     }
        ServiceRegistry.add_service('test', '0.0.0.0', 0)
        self._req_body = GatewayRequest(gwId="test id", authority='mobilityd',
                                        path='/magma.MobilityService'
                                             '/ListAddedIPv4Blocks',
                                        headers={'te': 'trailers',
                                                 'content-type':
                                                     'application/grpc',
                                                 'user-agent':
                                                     'grpc-python/1.4.0',
                                                 'grpc-accept-encoding':
                                                     'identity'},
                                        payload=bytes.fromhex('0000000000'))

    @unittest.mock.patch('aioh2.open_connection')
    def test_http_client_unary(self, mock_conn):
        req_body = GatewayRequest(gwId="test id", authority='mobilityd',
                                  path='/magma.MobilityService'
                                       '/ListAddedIPv4Blocks',
                                  headers={'te': 'trailers',
                                           'content-type': 'application/grpc',
                                           'user-agent': 'grpc-python/1.4.0',
                                           'grpc-accept-encoding': 'identity'},
                                  payload=bytes.fromhex('0000000000'))
        expected_payload = \
            b'\x00\x00\x00\x00\n\n\x08\x12\x04\xc0\xa8\x80\x00\x18\x18'
        expected_header = [(':status', '200'),
                           ('content-type', 'application/grpc')]
        expected_trailers = [('grpc-status', '0'), ('grpc-message', '')]

        mock_conn.side_effect = asyncio.coroutine(
            unittest.mock.MagicMock(
                return_value=MockUnaryClient(expected_payload, expected_header,
                                             expected_trailers, req_body)))

        request_queue = queue.Queue()
        conn_closed_table = {
            1234: False
        }

        future = asyncio.ensure_future(
            self._proxy_client.send(self._req_body,
                                    1234,
                                    request_queue,
                                    conn_closed_table))

        self._loop.run_until_complete(future)

        self.assertEqual(request_queue.qsize(), 1)
        res = request_queue.get()
        self.assertEqual(res.reqId, 1234)
        self.assertEqual(res.heartBeat, False)
        self.assertEqual(res.respBody.status, '200')
        self.assertEqual(res.respBody.payload, expected_payload)
        self.assertEqual(res.respBody.headers['grpc-status'], '0')
        self._loop.close()

    @unittest.mock.patch('aioh2.open_connection')
    def test_http_client_stream(self, mock_conn):
        req_body = GatewayRequest(gwId="test id", authority='mobilityd',
                                  path='/magma.MobilityService'
                                       '/ListAddedIPv4Blocks',
                                  headers={'te': 'trailers',
                                           'content-type': 'application/grpc',
                                           'user-agent': 'grpc-python/1.4.0',
                                           'grpc-accept-encoding': 'identity'},
                                  payload=bytes.fromhex('0000000000'))
        expected_payload = \
            b'\x00\x00\x00\x00\n\n\x08\x12\x04\xc0\xa8\x80\x00\x18\x18'
        expected_header = [(':status', '200'),
                           ('content-type', 'application/grpc')]
        expected_trailers = [('grpc-status', '0'), ('grpc-message', '')]

        mock_conn.side_effect = asyncio.coroutine(
            unittest.mock.MagicMock(
                return_value=MockStreamingClient(
                    expected_payload,
                    expected_header,
                    expected_trailers,
                    req_body
                )
            )
        )

        request_queue = queue.Queue()
        conn_closed_table = {
            1234: False
        }

        future = asyncio.ensure_future(
            self._proxy_client.send(self._req_body,
                                    1234,
                                    request_queue,
                                    conn_closed_table))

        self._loop.run_until_complete(future)

        self.assertEqual(request_queue.qsize(), 2)
        res_1 = request_queue.get(timeout=0)
        self.assertEqual(res_1.reqId, 1234)
        self.assertEqual(res_1.heartBeat, False)
        self.assertEqual(res_1.respBody.status, '200')
        self.assertEqual(res_1.respBody.payload, expected_payload)
        self.assertTrue('grpc-status' not in res_1.respBody.headers)
        res_2 = request_queue.get()
        self.assertEqual(res_2.reqId, 1234)
        self.assertEqual(res_2.heartBeat, False)
        self.assertEqual(res_2.respBody.status, '200')
        self.assertEqual(res_2.respBody.payload, expected_payload)
        self.assertEqual(res_2.respBody.headers['grpc-status'], '0')
        self._loop.close()
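
The streaming test expects two queued responses for a single request: a partial one carrying only the response headers and payload, and a final one whose headers include the grpc-status trailer. A hedged sketch of how a consumer might drain such a queue and tell them apart, mirroring the assertions above:

while not request_queue.empty():
    resp = request_queue.get_nowait()
    final = 'grpc-status' in resp.respBody.headers
    print(resp.reqId, resp.respBody.status, 'final' if final else 'partial')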
Example #7
class SyncRPCClient(threading.Thread):
    """
    SyncRPCClient runs in its own thread and opens a bidirectional stream
    with the cloud.
    """

    RETRY_MAX_DELAY_SECS = 10  # seconds

    def __init__(self, loop, response_timeout):
        threading.Thread.__init__(self)
        # a synchronized queue
        self._response_queue = queue.Queue()
        self._loop = loop
        asyncio.set_event_loop(self._loop)
        # seconds to wait for an actual SyncRPCResponse to become available
        # before sending out a heartBeat
        self._response_timeout = response_timeout
        self._proxy_client = ControlProxyHttpClient()
        self.daemon = True
        self._current_delay = 0
        self._last_conn_time = 0

    def run(self):
        """
        This is executed when the thread is started. It gets a connection to
        the cloud dispatcher and calls its bidirectional streaming rpc
        EstablishSyncRPCStream(). process_streams should never return; if it
        does, the exception is logged and a new connection to the dispatcher
        is attempted after a backoff delay capped at RETRY_MAX_DELAY_SECS.
        """
        while True:
            try:
                start_time = time.time()
                chan = ServiceRegistry.get_rpc_channel('dispatcher',
                                                       ServiceRegistry.CLOUD)
                client = SyncRPCServiceStub(chan)
                self._set_connect_time()
                self.process_streams(client)
            except Exception as exp:  # pylint: disable=broad-except
                conn_time = time.time() - start_time
                logging.error("[SyncRPC] Error after %ds: %s", conn_time, exp)
            # If the connection is terminated, wait for a period of time
            # before connecting back to the cloud.
            self._retry_connect_sleep()

    def process_streams(self, client):
        """
        Calls rpc function EstablishSyncRPCStream on the client to establish
        a stream with dispatcher in the cloud, processes all requests from
        the stream, and writes all responses to the stream.
        Args:
            client: a grpc client to dispatcher in the cloud.
        Returns:
            Should only return when an exception is encountered.
        """

        # call to bidirectional streaming grpc takes in an iterator,
        # and returns an iterator
        sync_rpc_requests = client.EstablishSyncRPCStream(
            self.send_sync_rpc_response())
        # forward incoming requests from cloud to control_proxy
        self.forward_requests(sync_rpc_requests)

    def send_sync_rpc_response(self):
        """
        Retrieve SyncRPCResponse from the queue. If no response is available,
        block for at most response_timeout seconds, and send a heartBeat on
        timeout.
        Returns: A generator of SyncRPCResponse
        """
        while True:
            try:
                resp = self._response_queue.get(block=True,
                                                timeout=self._response_timeout)
                logging.debug("[SyncRPC] Sending response")
                yield resp
            except queue.Empty:
                # response_queue is empty: send a heartbeat. The generator
                # cannot tell how long the stream has been idle, so a
                # heartbeat is sent whenever the queue times out.
                logging.debug("[SyncRPC] Sending heartbeat")
                yield SyncRPCResponse(heartBeat=True)

    def forward_requests(self, sync_rpc_requests):
        """
        Send requests in the sync_rpc_requests iterator.
        Args:
            sync_rpc_requests: an iterator of SyncRPCRequest from cloud

        Returns: Should only return when the server shuts the stream (reaches
        the end of the iterator sync_rpc_requests, or encounters an error)

        """
        try:
            while True:
                logging.debug("[SyncRPC] Waiting for requests")
                req = next(sync_rpc_requests)
                self.forward_request(req)
        except grpc.RpcError as err:
            # server end closed connection; retry rpc connection.
            raise Exception("Error when retrieving request: [%s] %s" %
                            (err.code(), err.details()))

    def forward_request(self, request):
        if request.heartBeat:
            logging.info("[SyncRPC] Got heartBeat from cloud")
            return
        try:
            logging.debug("[SyncRPC] Got a request")
            future = asyncio.run_coroutine_threadsafe(
                self._proxy_client.send(request.reqBody), self._loop)
            future.add_done_callback(
                lambda fut: self._loop.call_soon_threadsafe(
                    self.send_request_done, request.reqId, fut))
        except Exception as exp:  # pylint: disable=broad-except
            logging.error("[SyncRPC] Error when forwarding request: %s", exp)

    def send_request_done(self, req_id, future):
        """
        A future that has a GatewayResponse is done. Check if an exception is
        raised. If so, log the error and enqueue an empty SyncRPCResponse.
        Else, enqueue a SyncRPCResponse that contains the GatewayResponse that
        became available in the future.
        Args:
            req_id: request id that's associated with the response
            future: A future that contains a GatewayResponse that is done.

        Returns: None

        """
        err = future.exception()
        if err:
            logging.error("[SyncRPC] Forward to control proxy error: %s", err)
            self._response_queue.put(
                SyncRPCResponse(heartBeat=False,
                                reqId=req_id,
                                respBody=GatewayResponse(err=str(err))))
        else:
            res = future.result()
            self._response_queue.put(
                SyncRPCResponse(heartBeat=False, reqId=req_id, respBody=res))

    def _retry_connect_sleep(self):
        """
        Sleep for the current delay.
        If the last connection was made over 60 seconds ago, reset the delay
        to 0; otherwise double the current delay, capping it at
        RETRY_MAX_DELAY_SECS.
        """
        # if last connect time was over 60 secs ago, reset current_delay to 0
        if time.time() - self._last_conn_time > 60:
            self._current_delay = 0
        elif self._current_delay == 0:
            self._current_delay = 1
        else:
            self._current_delay = min(2 * self._current_delay,
                                      self.RETRY_MAX_DELAY_SECS)
        time.sleep(self._current_delay)

    def _set_connect_time(self):
        logging.info("[SyncRPC] Opening stream to cloud")
        self._last_conn_time = time.time()
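
forward_request and send_request_done above rely on submitting a coroutine to the client's loop from the gRPC thread and hopping back onto the loop thread when it completes. A self-contained illustration of that pattern with a stand-in coroutine (fake_send is hypothetical, not part of the module):

import asyncio
import threading
import time

loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

async def fake_send():
    # stand-in for ControlProxyHttpClient.send(request.reqBody)
    return "gateway response"

future = asyncio.run_coroutine_threadsafe(fake_send(), loop)
future.add_done_callback(
    lambda fut: loop.call_soon_threadsafe(print, "done:", fut.result()))
print("result:", future.result(timeout=1))
time.sleep(0.1)  # give the loop thread a moment to run the callback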