def test_get_state_single_job(self):
    """`job_module.get_state()` maps a job status to the action's
    configured state / multi-state transitions for a single-job task."""
    bundle = utils.gen_bundle()
    cluster_proto = utils.gen_prototype(bundle, 'cluster')
    cluster = utils.gen_cluster(prototype=cluster_proto)

    action = utils.gen_action(prototype=cluster_proto)
    action.state_on_success = 'success'
    action.state_on_fail = 'fail'
    action.multi_state_on_success_set = ['success']
    action.multi_state_on_success_unset = ['success unset']
    action.multi_state_on_fail_set = ['fail']
    action.multi_state_on_fail_unset = ['fail unset']
    action.save()

    task = utils.gen_task_log(cluster, action)
    job = utils.gen_job_log(task)

    # Each case: (job status, expected state,
    #             expected multi_state to set, expected multi_state to unset)
    cases = (
        (config.Job.SUCCESS, 'success', ['success'], ['success unset']),
        (config.Job.FAILED, 'fail', ['fail'], ['fail unset']),
        (config.Job.ABORTED, None, [], []),
    )
    for status, want_state, want_set, want_unset in cases:
        got_state, got_set, got_unset = job_module.get_state(action, job, status)
        self.assertEqual(got_state, want_state)
        self.assertListEqual(got_set, want_set)
        self.assertListEqual(got_unset, want_unset)
def test_unlock_affected(self):
    """After `unlock_affected()` the object is no longer locked and the
    task's lock concern is cleared."""
    cluster = utils.gen_cluster()
    task = utils.gen_task_log(cluster)
    utils.gen_job_log(task)
    task.lock_affected([cluster])

    task.unlock_affected()

    self.assertFalse(cluster.locked)
    self.assertIsNone(task.lock)
def test_lock_affected(self):
    """`lock_affected()` locks the passed objects and stores a lock
    concern on the task."""
    cluster = utils.gen_cluster()
    task = utils.gen_task_log(cluster)
    utils.gen_job_log(task)

    task.lock_affected([cluster])

    self.assertTrue(cluster.locked)
    # The lock is persisted on the task row, so re-read it from the DB.
    task.refresh_from_db()
    self.assertIsNotNone(task.lock)
def test_lock_affected__lock_is_single(self):
    """A task that already holds a lock concern must not lock anything
    else — `lock_affected()` is a no-op in that case."""
    cluster = utils.gen_cluster()
    task = utils.gen_task_log(cluster)
    utils.gen_job_log(task)

    # Pre-set an existing lock concern on the task.
    task.lock = utils.gen_concern_item(models.ConcernType.Lock)
    task.save()

    task.lock_affected([cluster])

    self.assertFalse(cluster.locked)
def test_save_hc__big_update__locked_hierarchy(self, mock_issue, mock_load, ctx):
    """
    Update bigger HC map - move `component_2` from `host_2` to `host_3`
    On locked hierarchy (from ansible task)
    Test:
        host_1 remains the same
        host_2 is unlocked
        host_3 became locked
    """
    service = utils.gen_service(self.cluster)
    component_1 = utils.gen_component(service)
    component_2 = utils.gen_component(service)

    provider = utils.gen_provider()
    host_1 = utils.gen_host(provider, cluster=self.cluster)
    host_2 = utils.gen_host(provider, cluster=self.cluster)
    host_3 = utils.gen_host(provider, cluster=self.cluster)
    utils.gen_host_component(component_1, host_1)
    utils.gen_host_component(component_2, host_2)

    # Simulate a running ansible task that locked the whole hierarchy.
    task = utils.gen_task_log(service)
    utils.gen_job_log(task)
    tree = cm.hierarchy.Tree(self.cluster)
    affected = (node.value for node in tree.get_all_affected(tree.built_from))
    task.lock_affected(affected)
    ctx.lock = task.lock

    # Re-read hosts: lock_affected() updated fresh instances in the DB.
    for host in (host_1, host_2, host_3):
        host.refresh_from_db()
    self.assertTrue(host_1.locked)
    self.assertTrue(host_2.locked)
    self.assertFalse(host_3.locked)

    # Move component_2: host_2 -> host_3.
    api_module.save_hc(
        self.cluster,
        [
            (service, host_1, component_1),
            (service, host_3, component_2),
        ],
    )

    # Re-read hosts: save_hc() updated fresh instances in the DB.
    for host in (host_1, host_2, host_3):
        host.refresh_from_db()
    self.assertTrue(host_1.locked)
    self.assertFalse(host_2.locked)
    self.assertTrue(host_3.locked)