def __init__(self):
    """Set up the amphora v2 provider driver's RPC client and repositories."""
    super(AmphoraProviderDriver, self).__init__()
    # RPC target aimed at the controller agent on the v2 amphora topic.
    target_kwargs = {
        'namespace': consts.RPC_NAMESPACE_CONTROLLER_AGENT,
        'topic': consts.TOPIC_AMPHORA_V2,
        'version': '2.0',
        'fanout': False,
    }
    self.target = messaging.Target(**target_kwargs)
    self.client = rpc.get_client(self.target)
    self.repositories = repositories.Repositories()
def __init__(self):
    """Attach the DB repositories and the configured API handler driver."""
    super(BaseController, self).__init__()
    self.repositories = repositories.Repositories()
    # Load the handler named in the [api_settings] config section via
    # the 'octavia.api.handlers' stevedore namespace.
    handler_manager = stevedore_driver.DriverManager(
        namespace='octavia.api.handlers',
        name=CONF.api_settings.api_handler,
        invoke_on_load=True)
    self.handler = handler_manager.driver
def batch_update(self, old_member_ids, new_member_ids, updated_members):
    """Validate and hand a batch member update to the simulated controller.

    :param old_member_ids: ids of members being removed
    :param new_member_ids: ids of members being added
    :param updated_members: member models being updated in place
    """
    # Validate incoming member bodies before any DB access.
    for updated in updated_members:
        validate_input(data_models.Member, updated)
    LOG.info(
        "%(entity)s handling the batch update of members: "
        "old=%(old)s, new=%(new)s",
        {'entity': self.__class__.__name__,
         'old': old_member_ids,
         'new': new_member_ids})
    repo = repos.Repositories()

    def _lookup(member_id):
        # One session per lookup, matching the per-id access pattern.
        return repo.member.get(db_api.get_session(), id=member_id)

    all_members = [_lookup(mid) for mid in old_member_ids]
    all_members += [_lookup(mid) for mid in new_member_ids]
    all_members += list(updated_members)
    simulate_controller(all_members, batch_update=True)
def __init__(self):
    """Initialize the A10 provider driver's RPC client and repositories."""
    super(A10ProviderDriver, self).__init__()
    # Controller-agent RPC target on the dedicated a10_octavia topic.
    self.target = messaging.Target(
        fanout=False,
        namespace=constants.RPC_NAMESPACE_CONTROLLER_AGENT,
        topic='a10_octavia',
        version='1.0')
    self.client = rpc.get_client(self.target)
    self.repositories = repositories.Repositories()
def __init__(self, **kwargs):
    """Create the per-table repositories shared by database tasks."""
    self.repos = repo.Repositories()
    # Table-driven construction keeps the attribute list easy to scan.
    for attr_name, repo_cls in (
            ('amphora_repo', repo.AmphoraRepository),
            ('health_mon_repo', repo.HealthMonitorRepository),
            ('listener_repo', repo.ListenerRepository),
            ('loadbalancer_repo', repo.LoadBalancerRepository),
            ('member_repo', repo.MemberRepository),
            ('pool_repo', repo.PoolRepository)):
        setattr(self, attr_name, repo_cls())
    super(BaseDatabaseTask, self).__init__(**kwargs)
def __init__(self):
    """Build the v1 amphora driver's RPC transport, target, and client."""
    super(AmphoraProviderDriver, self).__init__()
    self.transport = messaging.get_rpc_transport(cfg.CONF)
    # The topic is operator-configurable via [oslo_messaging].
    topic = cfg.CONF.oslo_messaging.topic
    self.target = messaging.Target(
        fanout=False,
        namespace=consts.RPC_NAMESPACE_CONTROLLER_AGENT,
        topic=topic,
        version='1.0')
    self.client = messaging.RPCClient(self.transport, target=self.target)
    self.repositories = repositories.Repositories()
def __init__(self):
    """Load the configured certificate manager and the DB repositories."""
    super(BaseController, self).__init__()
    # Certificate manager is chosen by the [certificates] config section.
    manager = stevedore_driver.DriverManager(
        namespace='octavia.cert_manager',
        name=CONF.certificates.cert_manager,
        invoke_on_load=True)
    self.cert_manager = manager.driver
    self.repositories = repositories.Repositories()
def __init__(self):
    """Set up v2 RPC plumbing plus the fernet codec for cert passphrases."""
    super().__init__()
    self.target = messaging.Target(
        fanout=False,
        namespace=consts.RPC_NAMESPACE_CONTROLLER_AGENT,
        topic=consts.TOPIC_AMPHORA_V2,
        version='2.0')
    self.client = rpc.get_client(self.target)
    self.repositories = repositories.Repositories()
    # Symmetric key used for server certificate passphrase handling.
    passphrase_key = utils.get_compatible_server_certs_key_passphrase()
    self.fernet = fernet.Fernet(passphrase_key)
def __init__(self, **kwargs):
    """Instantiate the ACOS-specific and stock Octavia repositories."""
    self.repos = repo.Repositories()
    # Mixed a10 / stock repositories, built in a single table-driven pass.
    for attr_name, repo_cls in (
            ('vthunder_repo', a10_repo.VThunderRepository),
            ('vrid_repo', a10_repo.VRIDRepository),
            ('amphora_repo', repo.AmphoraRepository),
            ('member_repo', a10_repo.MemberRepository),
            ('loadbalancer_repo', a10_repo.LoadBalancerRepository),
            ('vip_repo', repo.VipRepository),
            ('listener_repo', repo.ListenerRepository)):
        setattr(self, attr_name, repo_cls())
    super(BaseDatabaseTask, self).__init__(**kwargs)
def __init__(self):
    """Create the RPC client for the operator-configured v1 topic."""
    super().__init__()
    # Topic comes straight from the [oslo_messaging] config section.
    self.target = messaging.Target(
        namespace=consts.RPC_NAMESPACE_CONTROLLER_AGENT,
        topic=cfg.CONF.oslo_messaging.topic,
        version='1.0',
        fanout=False)
    self.client = rpc.get_client(self.target)
    self.repositories = repositories.Repositories()
def __init__(self):
    """Configure the A10 RPC transport, target arguments, and client."""
    super(A10ProviderDriver, self).__init__()
    self.transport = messaging.get_rpc_transport(cfg.CONF)
    # The target arguments are kept on the instance (self._args) so they
    # remain available after construction, as in the original design.
    self._args = {
        'fanout': False,
        'namespace': constants.RPC_NAMESPACE_CONTROLLER_AGENT,
        'topic': 'a10_octavia',
        'version': '1.0',
    }
    self.target = messaging.Target(**self._args)
    self.client = messaging.RPCClient(self.transport, target=self.target)
    self.repositories = repositories.Repositories()
def __init__(self):
    """Prepare the heartbeat listener socket and the status executor."""
    hm_conf = cfg.CONF.health_manager
    self.key = hm_conf.heartbeat_key
    self.ip = hm_conf.bind_ip
    self.port = hm_conf.bind_port
    self.sockaddr = None
    LOG.info('attempting to listen on %(ip)s port %(port)s',
             {'ip': self.ip, 'port': self.port})
    self.sock = None
    # Perform the initial bind with the configured key/address/port.
    self.update(self.key, self.ip, self.port)
    # Status updates run in worker *processes* (not threads).
    self.executor = futures.ProcessPoolExecutor(
        max_workers=hm_conf.status_update_threads)
    self.repo = repositories.Repositories().amphorahealth
def __init__(self, **kwargs):
    """Create the repositories and DB session used by statistics tasks."""
    self.repos = repo.Repositories()
    # Table-driven construction of the per-table repositories.
    for attr_name, repo_cls in (
            ('loadbalancer_repo', repo.LoadBalancerRepository),
            ('listener_repo', repo.ListenerRepository),
            ('pool_repo', repo.PoolRepository),
            ('health_mon_repo', repo.HealthMonitorRepository),
            ('member_repo', repo.MemberRepository),
            ('l7policy_repo', repo.L7PolicyRepository),
            ('l7rule_repo', repo.L7RuleRepository),
            ('listener_stats_repo', repo.ListenerStatisticsRepository)):
        setattr(self, attr_name, repo_cls())
    # Session is created once here and reused by the task methods.
    self.db_session = db_apis.get_session()
    super().__init__(**kwargs)
def __init__(self, **kwargs):
    """Instantiate the full set of a10 and stock repositories for DB tasks."""
    self.repos = repo.Repositories()
    # Mixed a10 / stock repositories, built in a single table-driven pass.
    for attr_name, repo_cls in (
            ('vthunder_repo', a10_repo.VThunderRepository),
            ('vrid_repo', a10_repo.VRIDRepository),
            ('amphora_repo', repo.AmphoraRepository),
            ('member_repo', a10_repo.MemberRepository),
            ('loadbalancer_repo', a10_repo.LoadBalancerRepository),
            ('vip_repo', repo.VipRepository),
            ('listener_repo', repo.ListenerRepository),
            ('flavor_repo', repo.FlavorRepository),
            ('flavor_profile_repo', repo.FlavorProfileRepository),
            ('nat_pool_repo', a10_repo.NatPoolRepository),
            ('vrrp_set_repo', a10_repo.VrrpSetRepository)):
        setattr(self, attr_name, repo_cls())
    super(BaseDatabaseTask, self).__init__(**kwargs)
def __init__(self):
    """Set up the F5 controller worker: repositories, managers, and threads.

    Starts a periodic-worker thread (pending sync / full sync / orphan
    cleanup), the AS3 worker thread, and optionally a Prometheus metrics
    HTTP server.
    """
    self._repositories = repo.Repositories()
    self._loadbalancer_repo = f5_repos.LoadBalancerRepository()
    self._amphora_repo = repo.AmphoraRepository()
    self._health_mon_repo = repo.HealthMonitorRepository()
    self._listener_repo = f5_repos.ListenerRepository()
    self._member_repo = repo.MemberRepository()
    self._pool_repo = f5_repos.PoolRepository()
    self._l7policy_repo = f5_repos.L7PolicyRepository()
    self._l7rule_repo = repo.L7RuleRepository()
    self._vip_repo = repo.VipRepository()
    self._quota_repo = repo.QuotasRepository()
    self.status = status_manager.StatusManager()
    self.sync = sync_manager.SyncManager(self.status,
                                         self._loadbalancer_repo)
    self.network_driver = driver_utils.get_network_driver()
    self.queue = SetQueue()
    # Background periodic jobs: each entry is (callback, args, kwargs).
    worker = periodics.PeriodicWorker([
        (self.pending_sync, None, None),
        (self.full_sync_reappearing_devices, None, None),
        (self.cleanup_orphaned_tenants, None, None)
    ])
    t = threading.Thread(target=worker.start)
    t.daemon = True
    t.start()
    LOG.info("Starting as3worker")
    as3worker = threading.Thread(target=self.as3worker)
    # Fix: Thread.setDaemon() is deprecated (since Python 3.10); assign
    # the attribute directly, consistent with the periodic thread above.
    as3worker.daemon = True
    as3worker.start()
    if cfg.CONF.f5_agent.prometheus:
        prometheus_port = CONF.f5_agent.prometheus_port
        LOG.info('Starting Prometheus HTTP server on port {}'.format(
            prometheus_port))
        prometheus.start_http_server(prometheus_port)
    super(ControllerWorker, self).__init__()
def __init__(self):
    """Initialize RPC messaging, the RPC listener, certs, and repositories."""
    super(NSXOctaviaDriver, self).__init__()
    # Run the private initializers in their required order.
    for initializer in (self._init_rpc_messaging,
                        self._init_rpc_listener,
                        self._init_cert_manager):
        initializer()
    self.repositories = repositories.Repositories()
def __init__(self, **kwargs):
    """Create the vThunder and amphora repositories used by DB tasks."""
    self.repos = repo.Repositories()
    # These two constructions are independent of each other.
    self.amphora_repo = repo.AmphoraRepository()
    self.vthunder_repo = a10_repo.VThunderRepository()
    super(BaseDatabaseTask, self).__init__(**kwargs)
def simulate_controller(data_model, delete=False, update=False, create=False):
    """Simulates a successful controller operator for a data model.

    Spawns a background thread that mimics what the real controller would
    do for the given model type: sleep briefly, then update the DB rows'
    operating/provisioning statuses.

    :param data_model: data model to simulate controller operation
    :param delete: deletes from the database
    :param update: updates the database record
    :param create: marks the record as successfully created
    """
    repo = repos.Repositories()

    def member_controller(member, delete=False, update=False, create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for member..."))
        db_mem = None
        if delete:
            db_mem = repo.member.get(db_api.get_session(), member.id)
            repo.member.delete(db_api.get_session(), id=member.id)
        elif update:
            db_mem = repo.member.get(db_api.get_session(), member.id)
            member_dict = member.to_dict()
            # Preserve the current operating status across the update.
            member_dict['operating_status'] = db_mem.operating_status
            repo.member.update(db_api.get_session(), member.id,
                               **member_dict)
        elif create:
            repo.member.update(db_api.get_session(), member.id,
                               operating_status=constants.ONLINE)
        # Collect the affected listeners (deduplicated) from both the DB
        # record and the incoming model.
        listeners = []
        if db_mem:
            for listener in db_mem.pool.listeners:
                if listener not in listeners:
                    listeners.append(listener)
        if member.pool.listeners:
            for listener in member.pool.listeners:
                if listener not in listeners:
                    listeners.append(listener)
        if listeners:
            for listener in listeners:
                repo.listener.update(db_api.get_session(), listener.id,
                                     operating_status=constants.ONLINE,
                                     provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(db_api.get_session(),
                                  member.pool.load_balancer.id,
                                  operating_status=constants.ONLINE,
                                  provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def l7policy_controller(l7policy, delete=False, update=False,
                            create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for l7policy..."))
        db_l7policy = None
        if delete:
            db_l7policy = repo.l7policy.get(db_api.get_session(),
                                            l7policy.id)
            repo.l7policy.delete(db_api.get_session(), id=l7policy.id)
        elif update:
            db_l7policy = repo.l7policy.get(db_api.get_session(),
                                            l7policy.id)
            l7policy_dict = l7policy.to_dict()
            repo.l7policy.update(db_api.get_session(), l7policy.id,
                                 **l7policy_dict)
        elif create:
            # Fix: the original referenced l7policy_dict here, but that
            # local is only assigned in the update branch (NameError).
            db_l7policy = repo.l7policy.create(db_api.get_session(),
                                               **l7policy.to_dict())
        if db_l7policy.listener:
            repo.listener.update(db_api.get_session(),
                                 db_l7policy.listener.id,
                                 operating_status=constants.ONLINE,
                                 provisioning_status=constants.ACTIVE)
            repo.load_balancer.update(db_api.get_session(),
                                      db_l7policy.listener.load_balancer.id,
                                      operating_status=constants.ONLINE,
                                      provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def l7rule_controller(l7rule, delete=False, update=False, create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for l7rule..."))
        db_l7rule = None
        if delete:
            db_l7rule = repo.l7rule.get(db_api.get_session(), l7rule.id)
            repo.l7rule.delete(db_api.get_session(), id=l7rule.id)
        elif update:
            db_l7rule = repo.l7rule.get(db_api.get_session(), l7rule.id)
            l7rule_dict = l7rule.to_dict()
            repo.l7rule.update(db_api.get_session(), l7rule.id,
                               **l7rule_dict)
        elif create:
            # Fix: the original referenced l7rule_dict here, but that
            # local is only assigned in the update branch (NameError).
            db_l7rule = repo.l7rule.create(db_api.get_session(),
                                           **l7rule.to_dict())
        if db_l7rule.l7policy.listener:
            listener = db_l7rule.l7policy.listener
            repo.listener.update(db_api.get_session(), listener.id,
                                 operating_status=constants.ONLINE,
                                 provisioning_status=constants.ACTIVE)
            repo.load_balancer.update(db_api.get_session(),
                                      listener.load_balancer.id,
                                      operating_status=constants.ONLINE,
                                      provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def health_monitor_controller(health_monitor, delete=False, update=False,
                                  create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for health monitor..."))
        db_hm = None
        if delete:
            db_hm = repo.health_monitor.get(db_api.get_session(),
                                            pool_id=health_monitor.pool.id)
            repo.health_monitor.delete(db_api.get_session(),
                                       pool_id=health_monitor.pool.id)
        elif update:
            db_hm = repo.health_monitor.get(db_api.get_session(),
                                            health_monitor.pool_id)
            hm_dict = health_monitor.to_dict()
            # Fix: operating_status is an attribute, not a callable — the
            # original called db_hm.operating_status() (TypeError). All
            # sibling controllers read it as a plain attribute.
            hm_dict['operating_status'] = db_hm.operating_status
            repo.health_monitor.update(db_api.get_session(), **hm_dict)
        elif create:
            repo.pool.update(db_api.get_session(), health_monitor.pool_id,
                             operating_status=constants.ONLINE)
        listeners = []
        if db_hm:
            for listener in db_hm.pool.listeners:
                if listener not in listeners:
                    listeners.append(listener)
        if health_monitor.pool.listeners:
            for listener in health_monitor.pool.listeners:
                if listener not in listeners:
                    listeners.append(listener)
        if listeners:
            for listener in listeners:
                repo.test_and_set_lb_and_listener_prov_status(
                    db_api.get_session(),
                    health_monitor.pool.load_balancer.id,
                    listener.id, constants.ACTIVE, constants.ACTIVE)
                repo.listener.update(db_api.get_session(), listener.id,
                                     operating_status=constants.ONLINE,
                                     provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(
            db_api.get_session(),
            health_monitor.pool.load_balancer.id,
            operating_status=constants.ONLINE,
            provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def pool_controller(pool, delete=False, update=False, create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for pool..."))
        db_pool = None
        if delete:
            db_pool = repo.pool.get(db_api.get_session(), id=pool.id)
            repo.pool.delete(db_api.get_session(), id=pool.id)
        elif update:
            db_pool = repo.pool.get(db_api.get_session(), id=pool.id)
            pool_dict = pool.to_dict()
            pool_dict['operating_status'] = db_pool.operating_status
            repo.update_pool_and_sp(db_api.get_session(), pool.id, pool_dict)
        elif create:
            repo.pool.update(db_api.get_session(), pool.id,
                             operating_status=constants.ONLINE)
        listeners = []
        if db_pool:
            for listener in db_pool.listeners:
                if listener not in listeners:
                    listeners.append(listener)
        if pool.listeners:
            for listener in pool.listeners:
                if listener not in listeners:
                    listeners.append(listener)
        if listeners:
            for listener in listeners:
                repo.listener.update(db_api.get_session(), listener.id,
                                     operating_status=constants.ONLINE,
                                     provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(db_api.get_session(),
                                  pool.load_balancer.id,
                                  operating_status=constants.ONLINE,
                                  provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def listener_controller(listener, delete=False, update=False,
                            create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for listener..."))
        if delete:
            repo.listener.update(db_api.get_session(), listener.id,
                                 operating_status=constants.OFFLINE,
                                 provisioning_status=constants.DELETED)
        elif update:
            db_listener = repo.listener.get(db_api.get_session(),
                                            id=listener.id)
            listener_dict = listener.to_dict()
            listener_dict['operating_status'] = db_listener.operating_status
            repo.listener.update(db_api.get_session(), listener.id,
                                 **listener_dict)
        elif create:
            repo.listener.update(db_api.get_session(), listener.id,
                                 operating_status=constants.ONLINE,
                                 provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(db_api.get_session(),
                                  listener.load_balancer.id,
                                  operating_status=constants.ONLINE,
                                  provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def loadbalancer_controller(loadbalancer, delete=False, update=False,
                                create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for loadbalancer..."))
        if delete:
            repo.load_balancer.update(
                db_api.get_session(), id=loadbalancer.id,
                operating_status=constants.OFFLINE,
                provisioning_status=constants.DELETED)
        elif update:
            # Fix: the original fetched the load balancer through
            # repo.listener.get — use the load_balancer repository.
            db_lb = repo.load_balancer.get(db_api.get_session(),
                                           id=loadbalancer.id)
            lb_dict = loadbalancer.to_dict()
            lb_dict['operating_status'] = db_lb.operating_status
            repo.load_balancer.update(db_api.get_session(), loadbalancer.id,
                                      **lb_dict)
        elif create:
            repo.load_balancer.update(db_api.get_session(),
                                      id=loadbalancer.id,
                                      operating_status=constants.ONLINE,
                                      provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    controller = loadbalancer_controller
    if isinstance(data_model, data_models.Member):
        controller = member_controller
    elif isinstance(data_model, data_models.HealthMonitor):
        controller = health_monitor_controller
    elif isinstance(data_model, data_models.Pool):
        controller = pool_controller
    elif isinstance(data_model, data_models.Listener):
        controller = listener_controller
    # Fix: the l7policy/l7rule controllers were defined but never
    # dispatched, leaving them dead code and routing those models to the
    # load balancer controller by default.
    elif isinstance(data_model, data_models.L7Policy):
        controller = l7policy_controller
    elif isinstance(data_model, data_models.L7Rule):
        controller = l7rule_controller
    thread = threading.Thread(target=controller,
                              args=(data_model, delete, update, create))
    thread.start()
def __init__(self):
    """Attach the shared database repositories to the controller."""
    super(BaseController, self).__init__()
    # Single facade object giving access to all per-table repositories.
    self.repositories = repositories.Repositories()
def setUp(self):
    """Build the full driver-agent test fixture.

    Starts the three driver-agent listener processes (status, stats, get)
    against a file-backed sqlite DB, stores a live certificate, and
    populates the database with a complete load balancer tree (LB, pool,
    member, health monitor, listener, L7 policy/rules), keeping matching
    provider-format dicts for assertions.
    """
    # Unique per-test socket / DB paths so parallel runs don't collide.
    status_socket_file = '/tmp/octavia-{}.status.sock'.format(
        uuidutils.generate_uuid())
    stats_socket_file = '/tmp/octavia-{}.stats.sock'.format(
        uuidutils.generate_uuid())
    get_socket_file = '/tmp/octavia-{}.get.sock'.format(
        uuidutils.generate_uuid())
    sqlite_db_file = '/tmp/octavia-{}.sqlite.db'.format(
        uuidutils.generate_uuid())
    sqlite_db_connection = 'sqlite:///{}'.format(sqlite_db_file)
    # Note that because the driver agent is a multi-process
    # agent we must use a sqlite file rather than an
    # in-memory instance.
    super(DriverAgentTest, self).setUp(
        connection_string=sqlite_db_connection)
    conf = self.useFixture(oslo_fixture.Config(config.cfg.CONF))
    conf.config(group="driver_agent", status_socket_path=status_socket_file)
    conf.config(group="driver_agent", stats_socket_path=stats_socket_file)
    conf.config(group="driver_agent", status_request_timeout=1)
    conf.config(group="driver_agent", get_socket_path=get_socket_file)
    conf.config(group="certificates", cert_manager='local_cert_manager')
    conf.config(group="certificates", storage_path='/tmp')
    # Set up the certificate
    cert_manager = stevedore_driver.DriverManager(
        namespace='octavia.cert_manager',
        name=CONF.certificates.cert_manager,
        invoke_on_load=True,
    ).driver
    self.cert_ref = cert_manager.store_cert(
        None, sample_certs.X509_CERT,
        sample_certs.X509_CERT_KEY_ENCRYPTED,
        sample_certs.X509_IMDS,
        private_key_passphrase=sample_certs.X509_CERT_KEY_PASSPHRASE)
    self.addCleanup(cert_manager.delete_cert, None, self.cert_ref)
    # Shared event used to signal all listener processes to exit.
    self.exit_event = multiprocessing.Event()
    self.status_listener_proc = multiprocessing.Process(
        name='status_listener', target=driver_listener.status_listener,
        args=(self.exit_event, ))
    # TODO(johnsom) Remove once https://bugs.python.org/issue6721
    # is resolved.
    self.status_listener_proc.daemon = True
    self.status_listener_proc.start()
    self.stats_listener_proc = multiprocessing.Process(
        name='stats_listener', target=driver_listener.stats_listener,
        args=(self.exit_event, ))
    # TODO(johnsom) Remove once https://bugs.python.org/issue6721
    # is resolved.
    self.stats_listener_proc.daemon = True
    self.stats_listener_proc.start()
    self.get_listener_proc = multiprocessing.Process(
        name='get_listener', target=driver_listener.get_listener,
        args=(self.exit_event, ))
    # TODO(johnsom) Remove once https://bugs.python.org/issue6721
    # is resolved.
    self.get_listener_proc.daemon = True
    self.get_listener_proc.start()
    self.addCleanup(self._process_cleanup)
    self.driver_lib = octavia_driver_lib.DriverLibrary(
        status_socket=status_socket_file,
        stats_socket=stats_socket_file,
        get_socket=get_socket_file)
    self.sample_data = sample_data_models.SampleDriverDataModels()
    self.repos = repositories.Repositories()
    # Create the full load balancer in the database
    self.tls_container_dict = {
        lib_consts.CERTIFICATE: sample_certs.X509_CERT.decode('utf-8'),
        lib_consts.ID: sample_certs.X509_CERT_SHA1,
        lib_consts.INTERMEDIATES: [
            i.decode('utf-8') for i in sample_certs.X509_IMDS_LIST],
        lib_consts.PASSPHRASE: None,
        lib_consts.PRIMARY_CN: sample_certs.X509_CERT_CN,
        lib_consts.PRIVATE_KEY: sample_certs.X509_CERT_KEY.decode('utf-8')}
    # ### Create load balancer
    self.repos.flavor_profile.create(
        self.session, id=self.sample_data.flavor_profile_id,
        provider_name=constants.AMPHORA,
        flavor_data='{"something": "else"}')
    self.repos.flavor.create(
        self.session, id=self.sample_data.flavor_id, enabled=True,
        flavor_profile_id=self.sample_data.flavor_profile_id)
    self.repos.create_load_balancer_and_vip(
        self.session, self.sample_data.test_loadbalancer1_dict,
        self.sample_data.test_vip_dict)
    # ### Create Pool
    pool_dict = copy.deepcopy(self.sample_data.test_pool1_dict)
    pool_dict[constants.LOAD_BALANCER_ID] = self.sample_data.lb_id
    # Use a live certificate
    pool_dict[constants.TLS_CERTIFICATE_ID] = self.cert_ref
    pool_dict[constants.CA_TLS_CERTIFICATE_ID] = self.cert_ref
    pool_dict[constants.CRL_CONTAINER_ID] = self.cert_ref
    # Remove items that are linked in the DB
    del pool_dict[lib_consts.MEMBERS]
    del pool_dict[constants.HEALTH_MONITOR]
    del pool_dict[lib_consts.SESSION_PERSISTENCE]
    del pool_dict[lib_consts.LISTENERS]
    del pool_dict[lib_consts.L7POLICIES]
    self.repos.pool.create(self.session, **pool_dict)
    self.repos.session_persistence.create(
        self.session, pool_id=self.sample_data.pool1_id,
        type=lib_consts.SESSION_PERSISTENCE_SOURCE_IP)
    self.provider_pool_dict = copy.deepcopy(
        self.sample_data.provider_pool1_dict)
    self.provider_pool_dict[
        constants.LISTENER_ID] = self.sample_data.listener1_id
    # Fix for render_unsets = True
    self.provider_pool_dict[lib_consts.SESSION_PERSISTENCE][
        lib_consts.COOKIE_NAME] = None
    self.provider_pool_dict[lib_consts.SESSION_PERSISTENCE][
        lib_consts.PERSISTENCE_GRANULARITY] = None
    self.provider_pool_dict[lib_consts.SESSION_PERSISTENCE][
        lib_consts.PERSISTENCE_TIMEOUT] = None
    # Use a live certificate
    self.provider_pool_dict[
        lib_consts.TLS_CONTAINER_DATA] = self.tls_container_dict
    self.provider_pool_dict[lib_consts.TLS_CONTAINER_REF] = self.cert_ref
    self.provider_pool_dict[lib_consts.CA_TLS_CONTAINER_DATA] = (
        sample_certs.X509_CERT.decode('utf-8'))
    self.provider_pool_dict[
        lib_consts.CA_TLS_CONTAINER_REF] = self.cert_ref
    self.provider_pool_dict[lib_consts.CRL_CONTAINER_DATA] = (
        sample_certs.X509_CERT.decode('utf-8'))
    self.provider_pool_dict[lib_consts.CRL_CONTAINER_REF] = self.cert_ref
    # ### Create Member
    member_dict = copy.deepcopy(self.sample_data.test_member1_dict)
    self.repos.member.create(self.session, **member_dict)
    self.provider_pool_dict[lib_consts.MEMBERS] = [
        self.sample_data.provider_member1_dict]
    # ### Create Health Monitor
    hm_dict = copy.deepcopy(self.sample_data.test_hm1_dict)
    self.repos.health_monitor.create(self.session, **hm_dict)
    self.provider_pool_dict[
        lib_consts.HEALTHMONITOR] = self.sample_data.provider_hm1_dict
    # ### Create Listener
    listener_dict = copy.deepcopy(self.sample_data.test_listener1_dict)
    listener_dict[lib_consts.DEFAULT_POOL_ID] = self.sample_data.pool1_id
    # Remove items that are linked in the DB
    del listener_dict[lib_consts.L7POLICIES]
    del listener_dict[lib_consts.DEFAULT_POOL]
    del listener_dict[constants.SNI_CONTAINERS]
    # Use a live certificate
    listener_dict[constants.TLS_CERTIFICATE_ID] = self.cert_ref
    listener_dict[constants.CLIENT_CA_TLS_CERTIFICATE_ID] = self.cert_ref
    listener_dict[constants.CLIENT_CRL_CONTAINER_ID] = self.cert_ref
    self.repos.listener.create(self.session, **listener_dict)
    self.repos.sni.create(self.session,
                          listener_id=self.sample_data.listener1_id,
                          tls_container_id=self.cert_ref, position=1)
    # Add our live certs in that differ from the fake certs in sample_data
    self.provider_listener_dict = copy.deepcopy(
        self.sample_data.provider_listener1_dict)
    self.provider_listener_dict[
        lib_consts.DEFAULT_TLS_CONTAINER_REF] = self.cert_ref
    self.provider_listener_dict[
        lib_consts.DEFAULT_TLS_CONTAINER_DATA] = self.tls_container_dict
    self.provider_listener_dict[
        lib_consts.CLIENT_CA_TLS_CONTAINER_REF] = self.cert_ref
    self.provider_listener_dict[
        lib_consts.CLIENT_CA_TLS_CONTAINER_DATA] = (
            sample_certs.X509_CERT.decode('utf-8'))
    self.provider_listener_dict[
        lib_consts.CLIENT_CRL_CONTAINER_REF] = self.cert_ref
    self.provider_listener_dict[lib_consts.CLIENT_CRL_CONTAINER_DATA] = (
        sample_certs.X509_CERT.decode('utf-8'))
    self.provider_listener_dict[lib_consts.SNI_CONTAINER_DATA] = [
        self.tls_container_dict]
    self.provider_listener_dict[lib_consts.SNI_CONTAINER_REFS] = [
        self.cert_ref]
    self.provider_listener_dict[
        lib_consts.DEFAULT_POOL] = self.provider_pool_dict
    self.provider_listener_dict[
        lib_consts.DEFAULT_POOL_ID] = self.sample_data.pool1_id
    self.provider_listener_dict[lib_consts.L7POLICIES] = [
        self.sample_data.provider_l7policy1_dict]
    # ### Create L7 Policy
    l7policy_dict = copy.deepcopy(self.sample_data.test_l7policy1_dict)
    del l7policy_dict[lib_consts.L7RULES]
    self.repos.l7policy.create(self.session, **l7policy_dict)
    # ### Create L7 Rules
    l7rule_dict = copy.deepcopy(self.sample_data.test_l7rule1_dict)
    self.repos.l7rule.create(self.session, **l7rule_dict)
    l7rule2_dict = copy.deepcopy(self.sample_data.test_l7rule2_dict)
    self.repos.l7rule.create(self.session, **l7rule2_dict)
    # Provider-format view of the whole tree for test assertions.
    self.provider_lb_dict = copy.deepcopy(
        self.sample_data.provider_loadbalancer_tree_dict)
    self.provider_lb_dict[lib_consts.POOLS] = [self.provider_pool_dict]
    self.provider_lb_dict[lib_consts.LISTENERS] = [
        self.provider_listener_dict]
def simulate_controller(data_model, delete=False, update=False, create=False):
    """Simulates a successful controller operator for a data model.

    Spawns a background thread that mimics the real controller: sleep
    briefly, then set the affected rows' operating/provisioning statuses.

    :param data_model: data model to simulate controller operation
    :param delete: deletes from the database
    :param update: updates the database record
    :param create: marks the record as successfully created
    """
    repo = repos.Repositories()

    def member_controller(member, delete=False, update=False, create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for member..."))
        if delete:
            repo.member.delete(db_api.get_session(), id=member.id)
        elif update:
            old_mem = repo.member.get(db_api.get_session(), member.id)
            member_dict = member.to_dict()
            # Preserve the current operating status across the update.
            member_dict['operating_status'] = old_mem.operating_status
            repo.member.update(db_api.get_session(), member.id,
                               **member_dict)
        elif create:
            repo.member.update(db_api.get_session(), member.id,
                               operating_status=constants.ONLINE)
        repo.listener.update(db_api.get_session(), member.pool.listener.id,
                             operating_status=constants.ONLINE,
                             provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(db_api.get_session(),
                                  member.pool.listener.load_balancer.id,
                                  operating_status=constants.ONLINE,
                                  provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def health_monitor_controller(health_monitor, delete=False, update=False,
                                  create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for health monitor..."))
        if delete:
            repo.health_monitor.delete(db_api.get_session(),
                                       pool_id=health_monitor.pool.id)
        elif update:
            hm = repo.health_monitor.get(db_api.get_session(),
                                         health_monitor.pool_id)
            hm_dict = health_monitor.to_dict()
            # Fix: operating_status is an attribute, not a callable — the
            # original called hm.operating_status() (TypeError); the member
            # controller above reads it as a plain attribute.
            hm_dict['operating_status'] = hm.operating_status
            repo.health_monitor.update(db_api.get_session(), **hm_dict)
        elif create:
            repo.pool.update(db_api.get_session(), health_monitor.pool_id,
                             operating_status=constants.ONLINE)
        repo.test_and_set_lb_and_listener_prov_status(
            db_api.get_session(),
            health_monitor.pool.listener.load_balancer.id,
            health_monitor.pool.listener.id, constants.ACTIVE,
            constants.ACTIVE)
        repo.listener.update(db_api.get_session(),
                             health_monitor.pool.listener.id,
                             operating_status=constants.ONLINE,
                             provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(
            db_api.get_session(),
            health_monitor.pool.listener.load_balancer.id,
            operating_status=constants.ONLINE,
            provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def pool_controller(pool, delete=False, update=False, create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for pool..."))
        if delete:
            repo.pool.delete(db_api.get_session(), id=pool.id)
        elif update:
            db_pool = repo.pool.get(db_api.get_session(), id=pool.id)
            pool_dict = pool.to_dict()
            pool_dict['operating_status'] = db_pool.operating_status
            # Session persistence is stored separately from the pool row.
            sp_dict = pool_dict.pop('session_persistence', None)
            repo.update_pool_on_listener(db_api.get_session(), pool.id,
                                         pool_dict, sp_dict)
        elif create:
            repo.pool.update(db_api.get_session(), pool.id,
                             operating_status=constants.ONLINE)
        repo.listener.update(db_api.get_session(), pool.listener.id,
                             operating_status=constants.ONLINE,
                             provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(db_api.get_session(),
                                  pool.listener.load_balancer.id,
                                  operating_status=constants.ONLINE,
                                  provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def listener_controller(listener, delete=False, update=False,
                            create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for listener..."))
        if delete:
            repo.listener.update(db_api.get_session(), listener.id,
                                 operating_status=constants.OFFLINE,
                                 provisioning_status=constants.DELETED)
        elif update:
            db_listener = repo.listener.get(db_api.get_session(),
                                            id=listener.id)
            listener_dict = listener.to_dict()
            listener_dict['operating_status'] = db_listener.operating_status
            repo.listener.update(db_api.get_session(), listener.id,
                                 **listener_dict)
        elif create:
            repo.listener.update(db_api.get_session(), listener.id,
                                 operating_status=constants.ONLINE,
                                 provisioning_status=constants.ACTIVE)
        repo.load_balancer.update(db_api.get_session(),
                                  listener.load_balancer.id,
                                  operating_status=constants.ONLINE,
                                  provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    def loadbalancer_controller(loadbalancer, delete=False, update=False,
                                create=False):
        time.sleep(ASYNC_TIME)
        LOG.info(_LI("Simulating controller operation for loadbalancer..."))
        if delete:
            repo.load_balancer.update(db_api.get_session(),
                                      id=loadbalancer.id,
                                      operating_status=constants.OFFLINE,
                                      provisioning_status=constants.DELETED)
        elif update:
            # Fix: the original fetched the load balancer through
            # repo.listener.get — use the load_balancer repository.
            db_lb = repo.load_balancer.get(db_api.get_session(),
                                           id=loadbalancer.id)
            lb_dict = loadbalancer.to_dict()
            lb_dict['operating_status'] = db_lb.operating_status
            repo.load_balancer.update(db_api.get_session(), loadbalancer.id,
                                      **lb_dict)
        elif create:
            repo.load_balancer.update(db_api.get_session(),
                                      id=loadbalancer.id,
                                      operating_status=constants.ONLINE,
                                      provisioning_status=constants.ACTIVE)
        LOG.info(_LI("Simulated Controller Handler Thread Complete"))

    controller = loadbalancer_controller
    if isinstance(data_model, data_models.Member):
        controller = member_controller
    elif isinstance(data_model, data_models.HealthMonitor):
        controller = health_monitor_controller
    elif isinstance(data_model, data_models.Pool):
        controller = pool_controller
    elif isinstance(data_model, data_models.Listener):
        controller = listener_controller
    thread = threading.Thread(target=controller,
                              args=(data_model, delete, update, create))
    thread.start()