def __init__(self):
    """Start the Big Switch REST proxy plugin.

    Registers the plugin's config options, creates the green-thread
    pool and the controller connection pool, wires up the topology
    callback used for backend (re)sync, sets up RPC for security-group
    and DHCP agents, and optionally pushes all data to the controllers
    on startup.
    """
    super(NeutronRestProxyV2, self).__init__()
    LOG.info(_LI('NeutronRestProxy: Starting plugin. Version=%s'),
             version.version_string_with_vcs())
    # Register this plugin's options before any of them are read below.
    pl_config.register_config()
    # Green-thread pool for concurrent work; size is operator-tunable.
    self.evpool = eventlet.GreenPool(cfg.CONF.RESTPROXY.thread_pool_size)

    # Include the Big Switch Extensions path in the api_extensions
    neutron_extensions.append_api_extensions_path(extensions.__path__)

    self.add_meta_server_route = cfg.CONF.RESTPROXY.add_meta_server_route

    # init network ctrl connections
    self.servers = servermanager.ServerPool()
    # Callback (and its kwargs) the server pool invokes to collect the
    # full topology when it needs to resync the backend controllers.
    self.servers.get_topo_function = self._get_all_data
    self.servers.get_topo_function_args = {'get_ports': True,
                                           'get_floating_ips': True,
                                           'get_routers': True,
                                           'get_sgs': True}

    self.network_scheduler = importutils.import_object(
        cfg.CONF.network_scheduler_driver
    )

    # setup rpc for security and DHCP agents
    self._setup_rpc()

    # Optionally force a full data sync to the controllers at startup.
    if cfg.CONF.RESTPROXY.sync_data:
        self._send_all_data_auto()

    self.add_periodic_dhcp_agent_status_check()
    LOG.debug("NeutronRestProxyV2: initialization done")
def initialize(self): LOG.debug('Initializing driver') # register plugin config opts pl_config.register_config() self.evpool = eventlet.GreenPool(cfg.CONF.RESTPROXY.thread_pool_size) LOG.debug("Force topology sync if consistency hash is empty") hash_handler = cdb.HashHandler() cur_hash = hash_handler.read_for_update() if not cur_hash: hash_handler.put_hash('intial:hash,code') LOG.debug("Force topology sync Done") # init network ctrl connections self.servers = servermanager.ServerPool() self.servers.get_topo_function = self._get_all_data_auto self.segmentation_types = ', '.join(cfg.CONF.ml2.type_drivers) # Track hosts running IVS to avoid excessive calls to the backend self.vswitch_host_cache = {} self.setup_sg_rpc_callbacks() self.unsupported_vnic_types = [ portbindings.VNIC_DIRECT, portbindings.VNIC_DIRECT_PHYSICAL ] LOG.debug("Initialization done")
def test_ipv6_server_address(self):
    """Brackets around a configured IPv6 controller address are stripped."""
    cfg.CONF.set_override(
        'servers', ['[ABCD:EF01:2345:6789:ABCD:EF01:2345:6789]:80'],
        'RESTPROXY')
    pool = servermanager.ServerPool()
    self.assertEqual('ABCD:EF01:2345:6789:ABCD:EF01:2345:6789',
                     pool.servers[0].server)
def test_connect_failures(self):
    """A null connection object yields the (0, None, None, None) tuple."""
    pool = servermanager.ServerPool()
    server = pool.servers[0]
    with mock.patch(HTTPCON, return_value=None):
        self.assertEqual((0, None, None, None),
                         server.rest_call('GET', '/'))
    # verify same behavior on ssl class
    server.currentcon = False
    server.ssl = True
    with mock.patch(HTTPSCON, return_value=None):
        self.assertEqual((0, None, None, None),
                         server.rest_call('GET', '/'))
def test_auth_header(self):
    """Configured credentials are sent as HTTP Basic authorization."""
    cfg.CONF.set_override('server_auth', 'username:pass', 'RESTPROXY')
    pool = servermanager.ServerPool()
    with mock.patch(HTTPCON) as conmock:
        conn = conmock.return_value
        conn.getresponse.return_value.getheader.return_value = 'HASHHEADER'
        pool.rest_create_network('tenant', 'network')
        sent_headers = conn.request.mock_calls[0][1][3]
        self.assertIn('Authorization', sent_headers)
        self.assertEqual('Basic dXNlcm5hbWU6cGFzcw==',
                         sent_headers['Authorization'])
def test_capabilities_retrieval_failure(self):
    """Unparseable capability payloads degrade to an empty set."""
    pool = servermanager.ServerPool()
    with mock.patch(HTTPCON) as conmock:
        resp = conmock.return_value.getresponse.return_value
        resp.getheader.return_value = 'HASHHEADER'
        # a failure to parse should result in an empty capability set
        resp.read.return_value = 'XXXXX'
        self.assertEqual([], pool.servers[0].get_capabilities())
        # One broken server should affect all capabilities
        resp.read.side_effect = ['{"a": "b"}', '["b","c","d"]']
        self.assertEqual(set(), pool.get_capabilities())
def test_header_add(self):
    """Caller-supplied headers are merged with the standard ones."""
    pool = servermanager.ServerPool()
    with mock.patch(HTTPCON) as conmock:
        conn = conmock.return_value
        conn.getresponse.return_value.getheader.return_value = 'HASHHEADER'
        pool.servers[0].rest_call('GET', '/',
                                  headers={'EXTRA-HEADER': 'HI'})
        sent = conn.request.mock_calls[0][1][3]
        # verify normal headers weren't mangled
        self.assertIn('Content-type', sent)
        self.assertEqual('application/json', sent['Content-type'])
        # verify new header made it in
        self.assertIn('EXTRA-HEADER', sent)
        self.assertEqual('HI', sent['EXTRA-HEADER'])
def test_reconnect_on_timeout_change(self):
    """A timeout change forces a new connection despite keep-alive."""
    pool = servermanager.ServerPool()
    server = pool.servers[0]
    with mock.patch(HTTPCON) as conmock:
        conn = conmock.return_value
        conn.getresponse.return_value.getheader.return_value = 'HASHHEADER'
        server.capabilities = ['keep-alive']
        server.rest_call('GET', '/', timeout=10)
        # even with keep-alive enabled, a change in timeout will trigger
        # a reconnect
        server.rest_call('GET', '/', timeout=75)
        expected = [mock.call('localhost', 9000, timeout=10),
                    mock.call('localhost', 9000, timeout=75)]
        conmock.assert_has_calls(expected, any_order=True)
def test_capabilities_retrieval(self):
    """Pool capabilities are the per-server intersection, then cached."""
    pool = servermanager.ServerPool()
    with mock.patch(HTTPCON) as conmock:
        resp = conmock.return_value.getresponse.return_value
        resp.getheader.return_value = 'HASHHEADER'
        # each server will get different capabilities
        resp.read.side_effect = ['["a","b","c"]', '["b","c","d"]']
        # pool capabilities is intersection between both
        self.assertEqual({'b', 'c'}, pool.get_capabilities())
        self.assertEqual(2, resp.read.call_count)
        # the pool should cache after the first call so no more
        # HTTP calls should be made
        resp.read.side_effect = ['["w","x","y"]', '["x","y","z"]']
        self.assertEqual({'b', 'c'}, pool.get_capabilities())
        self.assertEqual(2, resp.read.call_count)
def test_no_reconnect_recurse_to_infinity(self):
    """The reconnect retry recurses exactly once before raising."""
    self.skipTest("cached connections are currently disabled because "
                  "their assignment to the servermanager object is not "
                  "thread-safe")
    # retry uses recursion when a reconnect is necessary
    # this test makes sure it stops after 1 recursive call
    pool = servermanager.ServerPool()
    server = pool.servers[0]
    with mock.patch(HTTPCON) as conmock:
        conn = conmock.return_value
        # hash header must be string instead of mock object
        conn.getresponse.return_value.getheader.return_value = 'HASH'
        server.capabilities = ['keep-alive']
        server.rest_call('GET', '/first')
        # after retrying once, the rest call should raise the
        # exception up
        conn.request.side_effect = httplib.ImproperConnectionState()
        self.assertRaises(httplib.ImproperConnectionState,
                          server.rest_call, 'GET', '/second')
        # 1 for the first call, 2 for the second with retry
        self.assertEqual(3, conn.request.call_count)
def test_reconnect_cached_connection(self):
    """A failed request on a cached connection is retried once."""
    self.skipTest("cached connections are currently disabled because "
                  "their assignment to the servermanager object is not "
                  "thread-safe")
    pool = servermanager.ServerPool()
    server = pool.servers[0]
    with mock.patch(HTTPCON) as conmock:
        conn = conmock.return_value
        conn.getresponse.return_value.getheader.return_value = 'HASH'
        server.capabilities = ['keep-alive']
        server.rest_call('GET', '/first')
        # raise an error on re-use to verify reconnect
        # return okay the second time so the reconnect works
        conn.request.side_effect = [httplib.ImproperConnectionState(),
                                    mock.MagicMock()]
        server.rest_call('GET', '/second')
        requested = [call[1][1] for call in conn.request.mock_calls]
        self.assertEqual([pool.base_uri + '/first',
                          pool.base_uri + '/second',
                          pool.base_uri + '/second'],
                         requested)
def test_socket_error(self):
    """A socket timeout maps to the (0, None, None, None) error tuple."""
    pool = servermanager.ServerPool()
    with mock.patch(HTTPCON) as conmock:
        conmock.return_value.request.side_effect = socket.timeout()
        self.assertEqual((0, None, None, None),
                         pool.servers[0].rest_call('GET', '/'))