def test_deployment_logs(self):
    """Deployment log file gets content on deploy, is truncated on
    deployment delete, and is reusable by a new deployment that takes
    the same deployment id."""
    message = 'TEST MESSAGE'
    inputs = {'message': message}
    dsl_path = resource("dsl/deployment_logs.yaml")
    deployment, _ = deploy(dsl_path, inputs=inputs)

    work_dir = testenv.testenv_instance.test_working_dir
    deployment_log_path = os.path.join(
        work_dir, 'cloudify.management', 'work', 'logs',
        '{0}.log'.format(deployment.id))

    def verify_logs_exist_with_content():
        # print() form works on both Python 2 and 3
        print(deployment_log_path)
        self.assertTrue(os.path.isfile(deployment_log_path))
        with open(deployment_log_path) as f:
            self.assertIn(message, f.read())

    verify_logs_exist_with_content()

    undeploy(deployment.id, is_delete_deployment=True)

    # Verify log file is truncated on deployment delete.
    # assertEqual (was assertTrue('' == ...)) gives a useful diff on
    # failure.
    with open(deployment_log_path) as f:
        self.assertEqual('', f.read())

    deployment, _ = deploy(dsl_path, inputs=inputs,
                           deployment_id=deployment.id)
    # Verify new deployment with the same deployment id
    # can write to the previous location.
    verify_logs_exist_with_content()
def _test_retries_and_retry_interval_impl(self,
                                          blueprint,
                                          retries,
                                          retry_interval,
                                          expected_interval,
                                          expected_retries,
                                          invocations_type,
                                          expect_failure=False,
                                          inputs=None):
    """Deploy *blueprint* under the given retry configuration and
    assert the recorded invocation count and inter-invocation spacing
    match expectations."""
    self.configure(retries=retries, retry_interval=retry_interval)
    dep_id = str(uuid.uuid4())
    if expect_failure:
        with self.assertRaises(RuntimeError) as cm:
            deploy(dsl_path=resource(blueprint),
                   deployment_id=dep_id,
                   inputs=inputs)
        self.assertIn('Failing task on user defined exception',
                      str(cm.exception))
    else:
        deploy(resource(blueprint), deployment_id=dep_id, inputs=inputs)
    timestamps = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=dep_id)[invocations_type]
    # one initial attempt plus one entry per retry
    self.assertEqual(expected_retries + 1, len(timestamps))
    for earlier, later in zip(timestamps, timestamps[1:]):
        self.assertLessEqual(expected_interval, later - earlier)
def test_deployment_logs(self):
    """Verify the deployment log file lifecycle: created with content,
    truncated when the deployment is deleted, and writable again by a
    new deployment reusing the same id."""
    message = 'TEST MESSAGE'
    inputs = {'message': message}
    dsl_path = resource("dsl/deployment_logs.yaml")
    deployment, _ = deploy(dsl_path, inputs=inputs)
    work_dir = testenv.testenv_instance.test_working_dir
    deployment_log_path = os.path.join(
        work_dir, 'cloudify.management', 'work', 'logs',
        '{0}.log'.format(deployment.id))

    def verify_logs_exist_with_content():
        # print() form is valid on both Python 2 and 3
        print(deployment_log_path)
        self.assertTrue(os.path.isfile(deployment_log_path))
        with open(deployment_log_path) as f:
            self.assertIn(message, f.read())

    verify_logs_exist_with_content()
    undeploy(deployment.id, is_delete_deployment=True)
    # Verify log file is truncated on deployment delete.
    # assertEqual replaces assertTrue('' == ...) for a readable
    # failure message.
    with open(deployment_log_path) as f:
        self.assertEqual('', f.read())
    deployment, _ = deploy(dsl_path, inputs=inputs,
                           deployment_id=deployment.id)
    # Verify new deployment with the same deployment id
    # can write to the previous location.
    verify_logs_exist_with_content()
def _test_retries_and_retry_interval_impl(self,
                                          blueprint,
                                          retries,
                                          retry_interval,
                                          expected_interval,
                                          expected_retries,
                                          invocations_type,
                                          expect_failure=False,
                                          inputs=None):
    """Configure retry behaviour, deploy *blueprint*, then check the
    number and spacing of the invocations the mock plugin recorded."""
    self.configure(retries=retries, retry_interval=retry_interval)
    dep_id = str(uuid.uuid4())
    blueprint_path = resource(blueprint)
    if not expect_failure:
        deploy(blueprint_path, deployment_id=dep_id, inputs=inputs)
    else:
        with self.assertRaises(RuntimeError) as cm:
            deploy(dsl_path=blueprint_path,
                   deployment_id=dep_id,
                   inputs=inputs)
        self.assertIn('Failing task on user defined exception',
                      str(cm.exception))
    timestamps = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=dep_id)[invocations_type]
    # initial attempt + one entry per retry
    self.assertEqual(expected_retries + 1, len(timestamps))
    for idx in range(1, len(timestamps)):
        self.assertLessEqual(expected_interval,
                             timestamps[idx] - timestamps[idx - 1])
def _test_retries_and_retry_interval_impl(self,
                                          blueprint,
                                          retries,
                                          retry_interval,
                                          expected_interval,
                                          expected_retries,
                                          invocations_type,
                                          expect_failure=False):
    """Deploy *blueprint* with the given retry settings and verify the
    invocation timestamps the mock plugin recorded."""
    self.configure(retries=retries, retry_interval=retry_interval)
    dep_id = str(uuid.uuid4())
    if expect_failure:
        self.assertRaises(RuntimeError,
                          deploy,
                          dsl_path=resource(blueprint),
                          deployment_id=dep_id)
    else:
        deploy(resource(blueprint), deployment_id=dep_id)
    timestamps = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=dep_id)[invocations_type]
    # one initial attempt plus one per retry
    self.assertEqual(expected_retries + 1, len(timestamps))
    for previous, current in zip(timestamps, timestamps[1:]):
        self.assertLessEqual(expected_interval, current - previous)
def test_execute_operation_failure(self):
    """Deploying dsl/basic.yaml in this suite is expected to raise.

    Bug fix: the original placed self.fail() inside the try block, so
    the AssertionError it raises was swallowed by `except Exception`
    and the test passed even when deploy succeeded. Using try/except/
    else keeps the fail() outside the handler. The dead trailing
    `pass` is also removed.
    """
    deployment_id = str(uuid.uuid4())
    dsl_path = resource("dsl/basic.yaml")
    try:
        deploy(dsl_path, deployment_id=deployment_id)
    except Exception as e:
        # Best-effort logging of the failure message; the exception
        # itself is the expected outcome.
        if e.message:
            self.logger.info(e.message)
    else:
        self.fail('expected exception')
def _local_task_fail_impl(self, wf_name):
    """Run the workflow_api blueprint: with do_get the failure result
    is fetched by the workflow itself, otherwise deploy raises."""
    blueprint = resource('dsl/workflow_api.yaml')
    params = {'do_get': self.do_get}
    if not self.do_get:
        self.assertRaises(RuntimeError,
                          deploy,
                          blueprint,
                          wf_name,
                          parameters=params)
    else:
        deploy(blueprint, wf_name, parameters=params)
def _test1(self):
    # Stress scenario: create several deployments, then repeatedly run
    # uninstall/install across all of them, launching each batch of
    # executions concurrently and waiting for the batch to finish.
    # NOTE(review): the `while True` loop never terminates - this looks
    # like a manually-driven stress test; confirm it is not collected
    # by the automated runner (leading underscore suggests it is not).
    num_deps = 5
    for i in range(num_deps):
        deploy(resource('dsl/basic.yaml'),
               deployment_id='d{0}'.format(i))
    while True:
        for workflow in ['uninstall', 'install']:
            executions = []
            for i in range(num_deps):
                # start without waiting so the whole batch runs in
                # parallel
                execution = execute_workflow(
                    workflow,
                    deployment_id='d{0}'.format(i),
                    wait_for_execution=False)
                executions.append(execution)
            for execution in executions:
                wait_for_execution_to_end(execution)
def test_workflow_deployment_scaling_groups(self):
    """The 'workflow' workflow stores scaling-group data in runtime
    properties; group1 should contain the expected member."""
    deployment, _ = deploy(resource('dsl/store-scaling-groups.yaml'),
                           workflow_name='workflow')
    node_instance = self.client.node_instances.list(deployment.id)[0]
    group_members = (
        node_instance.runtime_properties
        ['scaling_groups']['group1']['members'])
    self.assertEqual(['node'], group_members)
def test_executions_sort(self):
    """Create a batch of executions, then verify the executions
    listing sorts by deployment_id and descending status."""
    deployment = deploy(resource('dsl/sort.yaml'))
    for _ in range(5):
        execute_workflow('install', deployment.id)
        execute_workflow('uninstall', deployment.id)
    self._test_sort('executions', ['deployment_id', '-status'])
def test_modification_operations(self):
    """Run the deployment_modification workflow and verify each
    lifecycle operation was invoked the expected number of times.

    Fix: the original asserted the 'preconfigure' count twice with
    identical arguments; the duplicate assertion is removed.
    """
    dsl_path = resource("dsl/deployment_modification_operations.yaml")
    deployment, _ = deploy(dsl_path)
    deployment_id = deployment.id
    execute_workflow('deployment_modification', deployment_id)
    invocations = self.get_plugin_data(
        'testmockoperations',
        deployment_id)['mock_operation_invocation']
    self.assertEqual(
        1, len([i for i in invocations if i['operation'] == 'create']))
    # preconfigure runs on both sides of the relationship
    self.assertEqual(
        2, len([i for i in invocations
                if i['operation'] == 'preconfigure']))
    configure_invocations = [
        i for i in invocations if i['operation'] == 'configure'
    ]
    self.assertEqual(1, len(configure_invocations))
    self.assertEqual(1, len(configure_invocations[0]['target_ids']))
    start_invocations = [
        i for i in invocations if i['operation'] == 'start'
    ]
    self.assertEqual(1, len(start_invocations))
    self.assertEqual(2, len(start_invocations[0]['target_ids']))
def test_executions_pagination(self):
    """Generate enough executions to paginate, then exercise the
    shared pagination helper on the executions listing."""
    deployment = deploy(resource('dsl/pagination.yaml'))
    for _ in range(5):
        execute_workflow('install', deployment.id)
        execute_workflow('uninstall', deployment.id)
    list_executions = partial(self.client.executions.list,
                              deployment_id=deployment.id)
    self._test_pagination(list_executions)
def test_pre_source_started_location_source(self):
    """The pre-init relationship hook should execute on the source
    node."""
    blueprint = resource(
        "dsl/relationship_interface_pre_source_location_source.yaml")
    deployment, _ = deploy(blueprint)
    self.verify_assertions(deployment.id,
                           hook='pre-init',
                           runs_on_source=True)
def test_deploy_with_operation_executor_override(self):
    """'start' overrides the operation executor, so the plugin must be
    installed on - and invoked from - the deployment worker."""
    dsl_path = resource('dsl/operation_executor_override.yaml')
    deployment, _ = deploy(dsl_path)
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id)
    # the non-host instance is the webserver
    webserver_nodes = [node for node in deployment_nodes
                       if 'host' not in node.node_id]
    self.assertEqual(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    start_invocation = self.get_plugin_data(
        plugin_name='target_aware_mock_plugin',
        deployment_id=deployment.id)[webserver_node.id]['start']
    self.assertEqual({'target': deployment.id}, start_invocation)
    plugin_installer_data = self.get_plugin_data(
        plugin_name='plugin_installer',
        deployment_id=deployment.id)
    worker_name = deployment.id
    # target_aware_mock_plugin should have been installed
    # on the deployment worker as well because 'start'
    # overrides the executor
    self.assertEqual(
        ['installed'],
        plugin_installer_data[worker_name]['target_aware_mock_plugin'])
    undeploy(deployment_id=deployment.id)
def test_post_source_started_location_target(self):
    """The post-init relationship hook should execute on the target
    node."""
    deployment, _ = deploy(resource(
        "dsl/relationship_interface_post_source_location_target.yaml"))
    self.verify_assertions(deployment.id,
                           hook='post-init',
                           runs_on_source=False)
def test_plugin_workdir(self):
    """Both the central (management) agent and the host agent should
    write the workdir file with their respective contents."""
    filename = 'test_plugin_workdir.txt'
    host_content = 'HOST_CONTENT'
    central_content = 'CENTRAL_CONTENT'
    deployment, _ = deploy(
        resource("dsl/plugin_workdir.yaml"),
        inputs={'filename': filename,
                'host_content': host_content,
                'central_content': central_content})
    host_id = self.client.node_instances.list(node_id='host').items[0].id
    from testenv import testenv_instance
    test_workdir = testenv_instance.test_working_dir
    central_agent = CeleryWorkerProcess(['cloudify.management'],
                                        test_workdir)
    host_agent = CeleryWorkerProcess([host_id], test_workdir)
    central_file = os.path.join(
        central_agent.workdir, 'deployments', deployment.id,
        'plugins', 'testmockoperations', filename)
    host_file = os.path.join(
        host_agent.workdir, 'plugins', 'testmockoperations', filename)
    with open(central_file) as f:
        self.assertEqual(central_content, f.read())
    with open(host_file) as f:
        self.assertEqual(host_content, f.read())
def launch_deployment(self, yaml_file, expected_num_of_node_instances=1):
    """Deploy *yaml_file*, cache the deployment and its node instances
    on the test instance, and wait for the initial workflows."""
    self.deployment, _ = deploy(resource(yaml_file))
    self.node_instances = self.client.node_instances.list(
        self.deployment.id)
    self.assertEqual(expected_num_of_node_instances,
                     len(self.node_instances))
    self.wait_for_executions(self.NUM_OF_INITIAL_WORKFLOWS)
def test_executions_pagination(self):
    """Run several install/uninstall cycles and check the paginated
    executions listing."""
    deployment = deploy(resource('dsl/pagination.yaml'))
    for _ in range(5):
        for workflow in ('install', 'uninstall'):
            execute_workflow(workflow, deployment.id)
    self._test_pagination(partial(self.client.executions.list,
                                  deployment_id=deployment.id))
def test_uninstall_with_dependency_order(self):
    """Uninstall must visit the three nodes in reverse dependency
    order, and the connection configurer must see the expected node
    pairs."""
    dsl_path = resource(
        "dsl/uninstall_dependencies-order-with-three-nodes.yaml")
    deployment, _ = deploy(dsl_path)
    deployment_id = deployment.id
    undeploy(deployment_id)
    # Checking that uninstall wasn't called on the contained node
    states = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)['state']
    node1_id, node2_id, node3_id = (
        states[0]['id'], states[1]['id'], states[2]['id'])
    unreachable_call_order = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)['unreachable_call_order']
    self.assertEqual(3, len(unreachable_call_order))
    # reverse dependency order: node3 first, node1 last
    actual_order = [entry['id'] for entry in unreachable_call_order]
    self.assertEqual([node3_id, node2_id, node1_id], actual_order)
    configurer_state = self.get_plugin_data(
        plugin_name='connection_configurer_mock',
        deployment_id=deployment_id)['state']
    self.assertEqual(2, len(configurer_state))
    self.assertTrue(
        configurer_state[0]['id'].startswith('contained_in_node2'))
    self.assertTrue(
        configurer_state[0]['related_id'].startswith('contained_in_node1'))
    self.assertTrue(
        configurer_state[1]['id'].startswith('containing_node'))
    self.assertTrue(
        configurer_state[1]['related_id'].startswith('contained_in_node1'))
def test_cancel_on_wait_for_task_termination(self):
    """Cancelling an execution while it waits for task termination
    should move it to CANCELLED."""
    _, execution_id = deploy(resource('dsl/workflow_api.yaml'),
                             self._testMethodName,
                             parameters={'do_get': self.do_get},
                             wait_for_execution=False)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.STARTED)
    self.client.executions.cancel(execution_id)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.CANCELLED)
def test_plugin_workdir(self):
    """The plugin workdir file must be written by both the management
    agent and the host agent with the matching content."""
    filename = 'test_plugin_workdir.txt'
    host_content = 'HOST_CONTENT'
    central_content = 'CENTRAL_CONTENT'
    blueprint_inputs = {
        'filename': filename,
        'host_content': host_content,
        'central_content': central_content,
    }
    deployment, _ = deploy(resource("dsl/plugin_workdir.yaml"),
                           inputs=blueprint_inputs)
    host_id = self.client.node_instances.list(node_id='host').items[0].id
    from testenv import testenv_instance
    test_workdir = testenv_instance.test_working_dir
    central_agent = CeleryWorkerProcess(['cloudify.management'],
                                        test_workdir)
    host_agent = CeleryWorkerProcess([host_id], test_workdir)
    central_file = os.path.join(central_agent.workdir,
                                'deployments',
                                deployment.id,
                                'plugins',
                                'testmockoperations',
                                filename)
    host_file = os.path.join(host_agent.workdir,
                             'plugins',
                             'testmockoperations',
                             filename)
    with open(central_file) as f:
        self.assertEqual(central_content, f.read())
    with open(host_file) as f:
        self.assertEqual(host_content, f.read())
def test_simple(self):
    """The workflow runs both a remote (plugin) task and a local task;
    verify both left their expected traces."""
    params = {
        'do_get': self.do_get,
        'key': 'key1',
        'value': 'value1'
    }
    expected_invocation = {'key1': 'value1'}
    deployment, _ = deploy(resource('dsl/workflow_api.yaml'),
                           self._testMethodName,
                           parameters=params)
    # testing workflow remote task
    invocation = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['mock_operation_invocation'][0]
    self.assertDictEqual(expected_invocation, invocation)
    # testing workflow local task
    instance = self.client.node_instances.list(
        deployment_id=deployment.id)[0]
    # re-fetch the instance to dodge eventual consistency
    instance = self.client.node_instances.get(instance.id)
    self.assertEqual('test_state', instance.state)
def test_deploy_with_operation_executor_override(self):
    """Because 'start' overrides the executor, the plugin has to be
    installed on the deployment worker and invoked there (agent
    plugin-data variant)."""
    deployment, _ = deploy(resource('dsl/operation_executor_override.yaml'))
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id)
    webserver_nodes = [n for n in deployment_nodes
                       if 'host' not in n.node_id]
    self.assertEqual(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    start_invocation = self.get_plugin_data(
        plugin_name='target_aware_mock_plugin',
        deployment_id=deployment.id)[webserver_node.id]['start']
    self.assertEqual({'target': deployment.id}, start_invocation)
    agent_data = self.get_plugin_data(plugin_name='agent',
                                      deployment_id=deployment.id)
    deployment_operations_worker_name = deployment.id
    # target_aware_mock_plugin should have been installed
    # on the deployment worker as well because 'start'
    # overrides the executor
    installed_plugins = agent_data[
        deployment_operations_worker_name]['target_aware_mock_plugin']
    self.assertEqual(['installed'], installed_plugins)
    undeploy(deployment_id=deployment.id)
def test_threshold_policy(self):
    # End-to-end check of a threshold policy: publishing metric values
    # above/below the configured threshold triggers executions whose
    # recorded invocations carry the crossed bound.
    dsl_path = resource("dsl/with_policies2.yaml")
    deployment, _ = deploy(dsl_path)
    self.deployment_id = deployment.id
    self.instance_id = self.wait_for_node_instance().id

    class Tester(object):
        # Tracks how many executions/invocations are expected so far
        # while metric values are published around the threshold.

        def __init__(self, test_case, threshold, current_executions,
                     current_invocations):
            self.test_case = test_case
            self.current_invocations = current_invocations
            self.current_executions = current_executions
            self.threshold = threshold

        def publish_above_threshold(self, deployment_id, do_assert):
            self.test_case.logger.info('Publish above threshold')
            self.test_case.publish(self.threshold + 1)
            if do_assert:
                self.inc()
                self.assertion(deployment_id, upper=True)

        def publish_below_threshold(self, deployment_id, do_assert):
            self.test_case.logger.info('Publish below threshold')
            self.test_case.publish(self.threshold - 1)
            if do_assert:
                self.inc()
                self.assertion(deployment_id, upper=False)

        def inc(self):
            # each asserted publish adds one execution and one
            # invocation to the expected totals
            self.current_executions += 1
            self.current_invocations += 1

        def assertion(self, deployment_id, upper):
            # Wait until the expected counts are reached, then verify
            # the last invocation recorded the crossed bound.
            self.test_case.logger.info('waiting for {} executions'.format(
                self.current_executions))
            self.test_case.wait_for_executions(self.current_executions)
            self.test_case.logger.info('waiting for {} invocations'.format(
                self.current_invocations))
            invocations = self.test_case.wait_for_invocations(
                deployment_id,
                self.current_invocations)
            if upper:
                key = 'upper'
                value = self.threshold + 1
            else:
                key = 'lower'
                value = self.threshold - 1
            self.test_case.assertEqual(
                invocations[-1][key], value,
                'key: {}, expected: {}'.format(key, value))

    # starts at 2 executions: presumably install + initial policy
    # execution - TODO confirm against the test environment
    tester = Tester(test_case=self,
                    threshold=100,
                    current_executions=2,
                    current_invocations=0)

    for _ in range(2):
        # the second (do_assert=False) publish of each pair should not
        # trigger another execution
        tester.publish_above_threshold(deployment.id, do_assert=True)
        tester.publish_above_threshold(deployment.id, do_assert=False)
        tester.publish_below_threshold(deployment.id, do_assert=True)
        tester.publish_below_threshold(deployment.id, do_assert=False)
def test_script_mapping_to_deployment_resource(self):
    # A workflow script placed under the deployment's fileserver folder
    # should take precedence over the blueprint-level script with the
    # same relative path.
    dsl_path = resource('dsl/test_script_mapping.yaml')
    deployment, _ = deploy(dsl_path)
    workflow_script_path = resource('dsl/scripts/workflows/workflow.py')
    with open(workflow_script_path, 'r') as f:
        workflow_script_content = f.read()
    deployment_folder_on_fs = os.path.join(
        testenv.testenv_instance.fileserver_dir,
        'deployments/{0}/scripts/workflows'.format(deployment.id))
    try:
        os.makedirs(deployment_folder_on_fs)
        deployment_workflow_script_path = os.path.join(
            deployment_folder_on_fs, 'workflow.py')
        self.logger.info('Writing workflow.py to: {0}'.format(
            deployment_workflow_script_path))
        with open(deployment_workflow_script_path, 'w') as f:
            # Copy the original script and append an extra operation so
            # we can tell the deployment-resource copy actually ran
            # (checked via 'op3_called' below).
            f.write(workflow_script_content)
            f.write(os.linesep)
            f.write("instance.execute_operation('test.op3')")
            f.write(os.linesep)
        execute_workflow('workflow', deployment.id)
        data = self.get_plugin_data(plugin_name='script',
                                    deployment_id=deployment.id)
        self.assertEqual(data['op1_called_with_property'], 'op2_called')
        self.assertEqual(data['op2_prop'], 'op2_value')
        self.assertIn('op3_called', data)
    finally:
        # always clean the deployment folder so other tests see the
        # blueprint-level script
        shutil.rmtree(deployment_folder_on_fs, ignore_errors=True)
def test_deploy_with_agent_worker_windows_3_2(self):
    # Install: the windows agent is installed on the host, the mock
    # agent plugin is installed into it, and create/start run there.
    # Uninstall: stop/delete run and the agent is fully torn down.
    dsl_path = resource('dsl/with_agent_worker_windows_3_2.yaml')
    deployment, _ = deploy(dsl_path, timeout_seconds=500)
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id
    )
    # the non-host instance is the webserver (Python 2 list filter)
    webserver_nodes = filter(lambda node: 'host' not in node.node_id,
                             deployment_nodes)
    self.assertEquals(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    invocations = self.get_plugin_data(
        plugin_name='mock_agent_plugin',
        deployment_id=deployment.id
    )[webserver_node.id]
    agent_installer_data = self.get_plugin_data(
        plugin_name='windows_agent_installer',
        deployment_id=deployment.id
    )
    # agent on host should have been started and restarted
    self.assertEqual(
        agent_installer_data[webserver_node.host_id]['states'],
        ['created', 'configured', 'started', 'restarted'])
    plugin_installer_data = self.get_plugin_data(
        plugin_name='windows_plugin_installer',
        deployment_id=deployment.id
    )
    self.assertEqual(
        plugin_installer_data[
            webserver_node.host_id
        ]['mock_agent_plugin'],
        ['installed'])
    expected_invocations = ['create', 'start']
    self.assertListEqual(invocations, expected_invocations)
    undeploy(deployment_id=deployment.id)
    # after undeploy the plugin should also record stop/delete
    invocations = self.get_plugin_data(
        plugin_name='mock_agent_plugin',
        deployment_id=deployment.id
    )[webserver_node.id]
    expected_invocations = ['create', 'start', 'stop', 'delete']
    self.assertListEqual(invocations, expected_invocations)
    # agent on host should have also
    # been stopped and uninstalled
    agent_installer_data = self.get_plugin_data(
        plugin_name='windows_agent_installer',
        deployment_id=deployment.id
    )
    self.assertEqual(
        agent_installer_data[webserver_node.host_id]['states'],
        ['created', 'configured', 'started', 'restarted',
         'stopped', 'deleted'])
def test_deployment_modifications_sort(self):
    """Create a series of deployment modifications, then verify the
    listing sorts by deployment_id."""
    deployment = deploy(resource('dsl/sort.yaml'))
    for instance_count in range(2, 12):
        modification = self.client.deployment_modifications.start(
            deployment_id=deployment.id,
            nodes={'node': {'instances': instance_count}})
        self.client.deployment_modifications.finish(modification.id)
    self._test_sort('deployment_modifications', 'deployment_id')
def test_cancel_on_wait_for_task_termination(self):
    """An execution waiting for task termination must transition to
    CANCELLED after a cancel request."""
    blueprint = resource('dsl/workflow_api.yaml')
    _, eid = deploy(blueprint,
                    self._testMethodName,
                    parameters={'do_get': self.do_get},
                    wait_for_execution=False)
    self.wait_for_execution_status(eid, status=Execution.STARTED)
    self.client.executions.cancel(eid)
    self.wait_for_execution_status(eid, status=Execution.CANCELLED)
def test_deployment_modifications_pagination(self):
    """Start/finish enough modifications to paginate, then run the
    shared pagination helper over the modifications listing."""
    deployment = deploy(resource("dsl/pagination.yaml"))
    for instance_count in range(2, 12):
        modification = self.client.deployment_modifications.start(
            deployment_id=deployment.id,
            nodes={"node": {"instances": instance_count}})
        self.client.deployment_modifications.finish(modification.id)
    list_modifications = partial(
        self.client.deployment_modifications.list,
        deployment_id=deployment.id)
    self._test_pagination(list_modifications)
def test_cancel_on_task_retry_interval(self):
    """Cancel an execution that is sleeping out a (very long) task
    retry interval; it should end up CANCELLED."""
    self.configure(retries=2, interval=1000000)
    _, execution_id = deploy(resource('dsl/workflow_api.yaml'),
                             self._testMethodName,
                             parameters={'do_get': self.do_get},
                             wait_for_execution=False)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.STARTED)
    self.client.executions.cancel(execution_id)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.CANCELLED)
def launch_deployment(self, yaml_file, expected_num_of_node_instances=1):
    """Deploy *yaml_file* and stash the resulting deployment and node
    instances on self for later assertions."""
    deployment, _ = deploy(resource(yaml_file))
    self.deployment = deployment
    instances = self.client.node_instances.list(deployment.id)
    self.node_instances = instances
    self.assertEqual(expected_num_of_node_instances, len(instances))
    self.wait_for_executions(self.NUM_OF_INITIAL_WORKFLOWS)
def test_get_blueprint(self):
    """A deployed blueprint should be retrievable by its id and carry
    a non-empty plan."""
    blueprint_id = str(uuid.uuid4())
    deployment, _ = deploy(resource("dsl/basic.yaml"),
                           blueprint_id=blueprint_id)
    self.assertEqual(blueprint_id, deployment.blueprint_id)
    blueprint = self.client.blueprints.get(blueprint_id)
    self.assertEqual(blueprint_id, blueprint.id)
    self.assertTrue(len(blueprint['plan']) > 0)
def test_subgraph_retries_provider_config_config(self):
    """subgraph_retries=2 in the provider context lets the failing
    subgraph retry twice, for 3 recorded failure invocations."""
    workflows_settings = {
        'task_retries': 0,
        'task_retry_interval': 0,
        'subgraph_retries': 2,
    }
    context = {'cloudify': {'workflows': workflows_settings}}
    deployment_id = str(uuid.uuid4())
    self.client.manager.create_context(self._testMethodName, context)
    deploy(resource('dsl/workflow_subgraph_retries.yaml'),
           deployment_id=deployment_id)
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)['failure_invocation']
    self.assertEqual(len(invocations), 3)
def test_script_mapping(self):
    """Run the 'workflow' workflow from the script-mapping blueprint
    and check both script operations recorded their data."""
    deployment, _ = deploy(resource('dsl/test_script_mapping.yaml'))
    execute_workflow('workflow', deployment.id)
    plugin_data = self.get_plugin_data(plugin_name='script',
                                       deployment_id=deployment.id)
    self.assertEqual('op2_called',
                     plugin_data['op1_called_with_property'])
    self.assertEqual('op2_value', plugin_data['op2_prop'])
def test_deployment_modifications_sort(self):
    """Produce ten finished modifications and verify sorting of the
    deployment_modifications listing by deployment_id."""
    deployment = deploy(resource('dsl/sort.yaml'))
    for count in range(2, 12):
        started = self.client.deployment_modifications.start(
            deployment_id=deployment.id,
            nodes={'node': {'instances': count}})
        self.client.deployment_modifications.finish(started.id)
    self._test_sort('deployment_modifications', 'deployment_id')
def test_deploy_multi_instance_many_different_hosts(self):
    """Three host nodes with five instances each yield fifteen
    machines, five per host prefix."""
    deployment, _ = deploy(
        resource("dsl/multi_instance_many_different_hosts.yaml"))
    machines = set(self.get_plugin_data(
        plugin_name="cloudmock",
        deployment_id=deployment.id)["machines"])
    self.assertEqual(15, len(machines))
    for host_prefix in ("host1", "host2", "host3"):
        matching = [m for m in machines if m.startswith(host_prefix)]
        self.assertEqual(5, len(matching))
def test_start_monitor_node_operation(self):
    """Exactly one monitoring invocation, for start_monitor, should be
    recorded."""
    deployment, _ = deploy(
        resource("dsl/hardcoded_operation_properties.yaml"))
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['monitoring_operations_invocation']
    self.assertEqual(1, len(invocations))
    self.assertEqual('start_monitor', invocations[0]['operation'])
def test_cancel_on_task_retry_interval(self):
    """With a huge retry interval configured, cancelling mid-retry
    should leave the execution CANCELLED."""
    self.configure(retries=2, interval=1000000)
    blueprint = resource('dsl/workflow_api.yaml')
    _, eid = deploy(blueprint,
                    self._testMethodName,
                    parameters={'do_get': self.do_get},
                    wait_for_execution=False)
    self.wait_for_execution_status(eid, status=Execution.STARTED)
    self.client.executions.cancel(eid)
    self.wait_for_execution_status(eid, status=Execution.CANCELLED)
def _test_custom_workflow(self, workflow, error_expected=False):
    # Run *workflow* against a blueprint whose task does not exist and
    # verify it fails (or succeeds) as requested by *error_expected*.
    deployment = deploy(resource("dsl/basic_task_not_exist.yaml"))
    try:
        execute_workflow(workflow, deployment.id)
        if error_expected:
            self.fail('RuntimeError expected')
    except RuntimeError as e:
        if not error_expected:
            self.fail('Success expected. error message: {0}'.format(e))
        # NOTE(review): source formatting is ambiguous here - assumed
        # this assertion applies on the expected-error path (it is
        # unreachable after self.fail() anyway); confirm against the
        # original file.
        self.assertIn(self.AGENT_ALIVE_FAIL, str(e))
def test_deployment_workflows(self):
    """The deployment should expose install, uninstall and the custom
    workflow declared in the blueprint mapping."""
    deployment, _ = deploy(resource("dsl/custom_workflow_mapping.yaml"))
    workflows = self.client.deployments.get(deployment.id).workflows
    self.assertEqual(3, len(workflows))
    workflow_names = [workflow.name for workflow in workflows]
    for expected_name in ('uninstall', 'install', 'custom'):
        self.assertTrue(expected_name in workflow_names)
def test_operation_mapping_override(self):
    """workflow2 overrides the operation mapping, so every recorded
    invocation carries only the overridden test value."""
    deployment, _ = deploy(resource("dsl/operation_mapping.yaml"),
                           'workflow2')
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['mock_operation_invocation']
    self.assertEqual(3, len(invocations))
    for invocation in invocations:
        self.assertEqual(1, len(invocation))
        self.assertEqual('overridden_test_value',
                         invocation['test_key'])
def test_start_monitor_node_operation(self):
    """One monitoring operation invocation is expected, and it must be
    start_monitor."""
    dsl_path = resource("dsl/hardcoded_operation_properties.yaml")
    deployment, _ = deploy(dsl_path)
    monitoring_invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['monitoring_operations_invocation']
    self.assertEqual(1, len(monitoring_invocations))
    only_invocation = monitoring_invocations[0]
    self.assertEqual('start_monitor', only_invocation['operation'])
def test_failed_uninstall_task(self):
    """Even with a failing stop task, undeploy should remove all
    machines."""
    deployment, _ = deploy(resource('dsl/basic_stop_error.yaml'))
    undeploy(deployment.id)
    machines = self.get_plugin_data(
        plugin_name='cloudmock',
        deployment_id=deployment.id)['machines']
    self.assertEqual(0, len(machines))
def test_update_node_bad_version(self):
    """Updating a node instance with an up-to-date version succeeds;
    a stale version must raise CloudifyClientError."""
    deploy(resource("dsl/basic.yaml"))
    client = create_rest_client()
    instance = client.node_instances.list()[0]
    # need the version
    instance = client.node_instances.get(instance.id)
    props = {'key': 'value'}
    result = client.node_instances.update(instance.id,
                                          state='started',
                                          runtime_properties=props,
                                          version=instance.version)
    self.assertEqual(instance.version + 1, result.version)
    self.assertEqual(instance.id, result.id)
    self.assertDictContainsSubset(props, result.runtime_properties)
    self.assertEqual('started', result.state)
    # making another call with a bad version
    self.assertRaises(CloudifyClientError,
                      client.node_instances.update,
                      instance.id,
                      version=1)
def test_deploy_multi_instance_application(self):
    """Every created machine should end up hosting an application
    instance."""
    deployment, _ = deploy(resource("dsl/multi_instance.yaml"))
    machines = set(self.get_plugin_data(
        plugin_name="cloudmock",
        deployment_id=deployment.id)["machines"])
    self.assertEqual(2, len(machines))
    apps_state = self.get_plugin_data(
        plugin_name="testmockoperations",
        deployment_id=deployment.id)["state"]
    # each app records its host id as the single capabilities key
    # (Python 2: dict.keys() is a list)
    machines_with_apps = set(
        app_state["capabilities"].keys()[0] for app_state in apps_state)
    self.assertEqual(machines, machines_with_apps)
def test_operation_mapping_override(self):
    """All three invocations produced by workflow2 should contain only
    the overridden test_key value."""
    dsl_path = resource("dsl/operation_mapping.yaml")
    deployment, _ = deploy(dsl_path, 'workflow2')
    recorded = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['mock_operation_invocation']
    self.assertEqual(3, len(recorded))
    for entry in recorded:
        self.assertEqual(1, len(entry))
        self.assertEqual(entry['test_key'], 'overridden_test_value')
def test_deployment_modifications_pagination(self):
    """Generate ten finished deployment modifications and check the
    paginated listing for this deployment."""
    deployment = deploy(resource('dsl/pagination.yaml'))
    for instance_count in range(2, 12):
        modification = self.client.deployment_modifications.start(
            deployment_id=deployment.id,
            nodes={'node': {'instances': instance_count}})
        self.client.deployment_modifications.finish(modification.id)
    self._test_pagination(
        partial(self.client.deployment_modifications.list,
                deployment_id=deployment.id))
def test_uninstall_application_single_host_node(self):
    """After undeploy, the cloudmock plugin should report that no
    machines remain."""
    deployment, _ = deploy(resource("dsl/basic.yaml"))
    undeploy(deployment.id)
    remaining_machines = self.get_plugin_data(
        plugin_name='cloudmock',
        deployment_id=deployment.id)['machines']
    self.assertEqual(0, len(remaining_machines))
def test_inject_properties_to_operation(self):
    """Hardcoded operation properties are injected into the invocation
    together with the node instance id."""
    deployment, _ = deploy(
        resource("dsl/hardcoded_operation_properties.yaml"))
    states = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['state']
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['mock_operation_invocation']
    self.assertEqual(1, len(invocations))
    only_invocation = invocations[0]
    self.assertEqual('mockpropvalue', only_invocation['mockprop'])
    self.assertEqual(states[0]['id'], only_invocation['id'])
def test_dependencies_order_with_two_nodes(self):
    """host_node must be processed before db_node."""
    blueprint_id = self.id()
    deployment, _ = deploy(
        resource("dsl/dependencies_order_with_two_nodes.yaml"),
        blueprint_id=blueprint_id)
    self.assertEqual(blueprint_id, deployment.blueprint_id)
    states = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['state']
    self.assertEqual(2, len(states))
    self.assertIn('host_node', states[0]['id'])
    self.assertIn('db_node', states[1]['id'])
def test_script_mapping(self):
    """Run the 'workflow' workflow from the script-mapping blueprint
    and verify both script operations recorded their data.

    (Previous docstring described policy/trigger/group processing,
    which this body does not exercise.)
    """
    dsl_path = resource('dsl/test_script_mapping.yaml')
    deployment, _ = deploy(dsl_path)
    execute_workflow('workflow', deployment.id)
    data = self.get_plugin_data(plugin_name='script',
                                deployment_id=deployment.id)
    self.assertEqual(data['op1_called_with_property'], 'op2_called')
    self.assertEqual(data['op2_prop'], 'op2_value')