def test_script_mapping_to_deployment_resource(self):
    """A workflow script placed in the deployment's fileserver folder
    overrides the blueprint's copy.

    The blueprint workflow.py is copied into the deployment's
    scripts/workflows folder with an extra operation appended; running
    the workflow must then execute the extended copy, evidenced by
    'op3_called' appearing in the plugin data.
    """
    dsl_path = resource('dsl/test_script_mapping.yaml')
    deployment, _ = deploy(dsl_path)
    workflow_script_path = resource('dsl/scripts/workflows/workflow.py')
    with open(workflow_script_path, 'r') as f:
        workflow_script_content = f.read()
    deployment_folder_on_fs = os.path.join(
        testenv.testenv_instance.fileserver_dir,
        'deployments/{0}/scripts/workflows'.format(deployment.id))
    try:
        os.makedirs(deployment_folder_on_fs)
        deployment_workflow_script_path = os.path.join(
            deployment_folder_on_fs, 'workflow.py')
        self.logger.info('Writing workflow.py to: {0}'.format(
            deployment_workflow_script_path))
        with open(deployment_workflow_script_path, 'w') as f:
            f.write(workflow_script_content)
            f.write(os.linesep)
            # append an extra operation so the deployment copy is
            # distinguishable from the blueprint copy
            f.write("instance.execute_operation('test.op3')")
            f.write(os.linesep)
        execute_workflow('workflow', deployment.id)
        data = self.get_plugin_data(plugin_name='script',
                                    deployment_id=deployment.id)
        self.assertEqual(data['op1_called_with_property'], 'op2_called')
        self.assertEqual(data['op2_prop'], 'op2_value')
        # op3 only runs if the deployment-resource copy was executed
        self.assertIn('op3_called', data)
    finally:
        # always clean the injected folder, even on assertion failure
        shutil.rmtree(deployment_folder_on_fs, ignore_errors=True)
def _test_retries_and_retry_interval_impl(self,
                                          blueprint,
                                          retries,
                                          retry_interval,
                                          expected_interval,
                                          expected_retries,
                                          invocations_type,
                                          expect_failure=False):
    """Deploy *blueprint* with the given retry settings and verify both
    the number of recorded invocations and the spacing between them."""
    self.configure(retries=retries, retry_interval=retry_interval)
    deployment_id = str(uuid.uuid4())
    if not expect_failure:
        deploy(resource(blueprint), deployment_id=deployment_id)
    else:
        self.assertRaises(RuntimeError,
                          deploy,
                          dsl_path=resource(blueprint),
                          deployment_id=deployment_id)
    plugin_data = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)
    invocations = plugin_data[invocations_type]
    # one initial attempt plus the expected retries
    self.assertEqual(expected_retries + 1, len(invocations))
    # consecutive invocations must honour the configured interval
    for earlier, later in zip(invocations, invocations[1:]):
        self.assertLessEqual(expected_interval, later - earlier)
def _test_retries_and_retry_interval_impl(self, blueprint, retries,
                                          retry_interval,
                                          expected_interval,
                                          expected_retries,
                                          invocations_type,
                                          expect_failure=False,
                                          inputs=None):
    """Deploy *blueprint* under the given retry configuration and assert
    the recorded invocation count and inter-invocation spacing.

    :param expected_retries: retries expected beyond the first attempt
    :param invocations_type: key into the testmockoperations plugin
        data holding the invocation timestamps
    :param expect_failure: when True, deploy must raise RuntimeError
        carrying the user-defined failure message
    :param inputs: optional deployment inputs passed through to deploy
    """
    self.configure(retries=retries, retry_interval=retry_interval)
    deployment_id = str(uuid.uuid4())
    if expect_failure:
        with self.assertRaises(RuntimeError) as cm:
            deploy(dsl_path=resource(blueprint),
                   deployment_id=deployment_id,
                   inputs=inputs)
        self.assertIn('Failing task on user defined exception',
                      str(cm.exception))
    else:
        deploy(resource(blueprint),
               deployment_id=deployment_id,
               inputs=inputs)
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)[invocations_type]
    # one initial attempt plus the expected retries
    self.assertEqual(expected_retries + 1, len(invocations))
    # consecutive invocations must honour the configured interval
    for i in range(len(invocations) - 1):
        self.assertLessEqual(expected_interval,
                             invocations[i + 1] - invocations[i])
def _test_retries_and_retry_interval_impl(self, blueprint, retries,
                                          retry_interval,
                                          expected_interval,
                                          expected_retries,
                                          invocations_type,
                                          expect_failure=False,
                                          inputs=None):
    """Check invocation count and spacing for a retried operation."""
    self.configure(retries=retries, retry_interval=retry_interval)
    deployment_id = str(uuid.uuid4())
    if not expect_failure:
        deploy(resource(blueprint),
               deployment_id=deployment_id,
               inputs=inputs)
    else:
        with self.assertRaises(RuntimeError) as cm:
            deploy(dsl_path=resource(blueprint),
                   deployment_id=deployment_id,
                   inputs=inputs)
        self.assertIn('Failing task on user defined exception',
                      str(cm.exception))
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)[invocations_type]
    # first attempt + retries
    self.assertEqual(expected_retries + 1, len(invocations))
    for previous, current in zip(invocations, invocations[1:]):
        self.assertLessEqual(expected_interval, current - previous)
def _local_task_fail_impl(self, wf_name):
    """Deploy the workflow-api blueprint: succeeds with do_get,
    raises RuntimeError otherwise."""
    params = {'do_get': self.do_get}
    if not self.do_get:
        self.assertRaises(RuntimeError,
                          deploy,
                          resource('dsl/workflow_api.yaml'),
                          wf_name,
                          parameters=params)
    else:
        deploy(resource('dsl/workflow_api.yaml'),
               wf_name,
               parameters=params)
def test_deployment_logs(self):
    """Deployment log file is created with content, truncated when the
    deployment is deleted, and reusable by a new deployment recycling
    the same deployment id."""
    message = 'TEST MESSAGE'
    inputs = {'message': message}
    dsl_path = resource("dsl/deployment_logs.yaml")
    deployment, _ = deploy(dsl_path, inputs=inputs)
    work_dir = testenv.testenv_instance.test_working_dir
    deployment_log_path = os.path.join(
        work_dir, 'cloudify.management', 'work', 'logs',
        '{0}.log'.format(deployment.id))

    def verify_logs_exist_with_content():
        # the deployment's log file must exist and contain the message
        print deployment_log_path
        self.assertTrue(os.path.isfile(deployment_log_path))
        with open(deployment_log_path) as f:
            self.assertIn(message, f.read())

    verify_logs_exist_with_content()
    undeploy(deployment.id, is_delete_deployment=True)
    # Verify log file id truncated on deployment delete
    with open(deployment_log_path) as f:
        self.assertTrue('' == f.read())
    deployment, _ = deploy(dsl_path, inputs=inputs,
                           deployment_id=deployment.id)
    # Verify new deployment with the same deployment id
    # can write to the previous location.
    verify_logs_exist_with_content()
def test_deployment_logs(self):
    """Per-deployment log lifecycle: created with content on deploy,
    truncated on delete, and writable again when a new deployment
    reuses the old deployment id."""
    message = 'TEST MESSAGE'
    inputs = {'message': message}
    dsl_path = resource("dsl/deployment_logs.yaml")
    deployment, _ = deploy(dsl_path, inputs=inputs)
    work_dir = testenv.testenv_instance.test_working_dir
    deployment_log_path = os.path.join(
        work_dir, 'cloudify.management', 'work', 'logs',
        '{0}.log'.format(deployment.id))

    def verify_logs_exist_with_content():
        # log file must exist and include the input message
        print deployment_log_path
        self.assertTrue(os.path.isfile(deployment_log_path))
        with open(deployment_log_path) as f:
            self.assertIn(message, f.read())

    verify_logs_exist_with_content()
    undeploy(deployment.id, is_delete_deployment=True)
    # Verify log file id truncated on deployment delete
    with open(deployment_log_path) as f:
        self.assertTrue('' == f.read())
    deployment, _ = deploy(dsl_path, inputs=inputs,
                           deployment_id=deployment.id)
    # Verify new deployment with the same deployment id
    # can write to the previous location.
    verify_logs_exist_with_content()
def test_deploy_with_operation_executor_override(self):
    """An operation-level executor override causes the plugin to also
    be installed on the deployment worker, and 'start' runs with the
    deployment worker as its target."""
    dsl_path = resource('dsl/operation_executor_override.yaml')
    deployment, _ = deploy(dsl_path)
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id
    )
    # the single non-host instance is the webserver
    webserver_nodes = filter(lambda node: 'host' not in node.node_id,
                             deployment_nodes)
    self.assertEquals(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    start_invocation = self.get_plugin_data(
        plugin_name='target_aware_mock_plugin',
        deployment_id=deployment.id
    )[webserver_node.id]['start']
    # 'start' must have executed with the deployment worker as target
    expected_start_invocation = {'target': deployment.id}
    self.assertEqual(expected_start_invocation, start_invocation)
    plugin_installer_data = self.get_plugin_data(
        plugin_name='plugin_installer',
        deployment_id=deployment.id
    )
    deployment_operations_worker_name = deployment.id
    # target_aware_mock_plugin should have been installed
    # on the deployment worker as well because 'start'
    # overrides the executor
    self.assertEqual(
        plugin_installer_data[
            deployment_operations_worker_name
        ]['target_aware_mock_plugin'],
        ['installed'])
    undeploy(deployment_id=deployment.id)
def test_executions_sort(self):
    """Run several install/uninstall cycles, then verify execution
    sorting by deployment_id and descending status."""
    deployment = deploy(resource('dsl/sort.yaml'))
    for _ in range(5):
        for workflow_id in ('install', 'uninstall'):
            execute_workflow(workflow_id, deployment.id)
    self._test_sort('executions', ['deployment_id', '-status'])
def test_workflow_deployment_scaling_groups(self):
    """Scaling-group membership is stored in runtime properties."""
    deployment, _ = deploy(resource('dsl/store-scaling-groups.yaml'),
                           workflow_name='workflow')
    instances = self.client.node_instances.list(deployment.id)
    group_members = instances[0].runtime_properties[
        'scaling_groups']['group1']['members']
    self.assertEqual(['node'], group_members)
def launch_deployment(self, yaml_file, expected_num_of_node_instances=1):
    """Deploy *yaml_file*, record its instances, and wait for the
    initial workflows to finish."""
    deployment = deploy(resource(yaml_file))[0]
    self.deployment = deployment
    self.node_instances = self.client.node_instances.list(deployment.id)
    self.assertEqual(expected_num_of_node_instances,
                     len(self.node_instances))
    self.wait_for_executions(self.NUM_OF_INITIAL_WORKFLOWS)
def test_simple(self):
    """Workflow parameters flow to both a remote plugin task and a
    local (storage) task that sets the instance state."""
    parameters = {
        'do_get': self.do_get,
        'key': 'key1',
        'value': 'value1'
    }
    result_dict = {
        'key1': 'value1'
    }
    deployment, _ = deploy(resource('dsl/workflow_api.yaml'),
                           self._testMethodName,
                           parameters=parameters)
    # testing workflow remote task
    invocation = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id
    )['mock_operation_invocation'][0]
    self.assertDictEqual(result_dict, invocation)
    # testing workflow local task
    instance = self.client.node_instances.list(
        deployment_id=deployment.id)[0]
    # I am in love with eventual consistency
    # (re-fetch the instance to get its up-to-date state)
    instance = self.client.node_instances.get(instance.id)
    self.assertEqual('test_state', instance.state)
def test_plugin_workdir(self):
    """Plugins get a persistent workdir: the central (management)
    worker writes under deployments/<id>/plugins/..., the host agent
    under its own plugins/ folder; both files must contain the content
    written by the plugin."""
    filename = 'test_plugin_workdir.txt'
    host_content = 'HOST_CONTENT'
    central_content = 'CENTRAL_CONTENT'
    dsl_path = resource("dsl/plugin_workdir.yaml")
    deployment, _ = deploy(dsl_path, inputs={
        'filename': filename,
        'host_content': host_content,
        'central_content': central_content
    })
    host_id = self.client.node_instances.list(node_id='host').items[0].id
    from testenv import testenv_instance
    test_workdir = testenv_instance.test_working_dir
    central_agent = CeleryWorkerProcess(['cloudify.management'],
                                        test_workdir)
    host_agent = CeleryWorkerProcess([host_id], test_workdir)
    central_file = os.path.join(central_agent.workdir, 'deployments',
                                deployment.id, 'plugins',
                                'testmockoperations', filename)
    host_file = os.path.join(host_agent.workdir, 'plugins',
                             'testmockoperations', filename)
    with open(central_file) as f:
        self.assertEqual(central_content, f.read())
    with open(host_file) as f:
        self.assertEqual(host_content, f.read())
def test_post_source_started_location_target(self):
    """Relationship interface runs after source start, on the target."""
    blueprint = \
        "dsl/relationship_interface_post_source_location_target.yaml"
    deployment, _ = deploy(resource(blueprint))
    self.verify_assertions(deployment.id,
                           hook='post-init',
                           runs_on_source=False)
def test_pre_source_started_location_source(self):
    """Relationship interface runs before source start, on the source."""
    blueprint = \
        "dsl/relationship_interface_pre_source_location_source.yaml"
    deployment, _ = deploy(resource(blueprint))
    self.verify_assertions(deployment.id,
                           hook='pre-init',
                           runs_on_source=True)
def test_plugin_workdir(self):
    """Plugin workdir contract: central worker files live under
    deployments/<id>/plugins/<plugin>/, host agent files under the
    agent's plugins/<plugin>/ folder; both must hold the content the
    plugin wrote."""
    filename = 'test_plugin_workdir.txt'
    host_content = 'HOST_CONTENT'
    central_content = 'CENTRAL_CONTENT'
    dsl_path = resource("dsl/plugin_workdir.yaml")
    deployment, _ = deploy(dsl_path, inputs={
        'filename': filename,
        'host_content': host_content,
        'central_content': central_content
    })
    host_id = self.client.node_instances.list(node_id='host').items[0].id
    from testenv import testenv_instance
    test_workdir = testenv_instance.test_working_dir
    central_agent = CeleryWorkerProcess(['cloudify.management'],
                                        test_workdir)
    host_agent = CeleryWorkerProcess([host_id], test_workdir)
    central_file = os.path.join(
        central_agent.workdir, 'deployments', deployment.id,
        'plugins', 'testmockoperations', filename)
    host_file = os.path.join(
        host_agent.workdir, 'plugins', 'testmockoperations', filename)
    with open(central_file) as f:
        self.assertEqual(central_content, f.read())
    with open(host_file) as f:
        self.assertEqual(host_content, f.read())
def test_threshold_policy(self):
    """A threshold policy triggers an execution each time the published
    metric crosses the threshold; repeated publications on the same
    side must not trigger again."""
    dsl_path = resource("dsl/with_policies2.yaml")
    deployment, _ = deploy(dsl_path)
    self.deployment_id = deployment.id
    self.instance_id = self.wait_for_node_instance().id

    class Tester(object):
        """Publishes metric values around the threshold and asserts
        the resulting execution/invocation counts."""

        def __init__(self, test_case, threshold, current_executions,
                     current_invocations):
            self.test_case = test_case
            self.current_invocations = current_invocations
            self.current_executions = current_executions
            self.threshold = threshold

        def publish_above_threshold(self, deployment_id, do_assert):
            self.test_case.logger.info('Publish above threshold')
            self.test_case.publish(self.threshold + 1)
            if do_assert:
                self.inc()
                self.assertion(deployment_id, upper=True)

        def publish_below_threshold(self, deployment_id, do_assert):
            self.test_case.logger.info('Publish below threshold')
            self.test_case.publish(self.threshold - 1)
            if do_assert:
                self.inc()
                self.assertion(deployment_id, upper=False)

        def inc(self):
            # each triggered policy adds one execution and one invocation
            self.current_executions += 1
            self.current_invocations += 1

        def assertion(self, deployment_id, upper):
            self.test_case.logger.info('waiting for {} executions'.format(
                self.current_executions))
            self.test_case.wait_for_executions(self.current_executions)
            self.test_case.logger.info('waiting for {} invocations'.format(
                self.current_invocations))
            invocations = self.test_case.wait_for_invocations(
                deployment_id, self.current_invocations)
            # the latest invocation must carry the crossing value
            if upper:
                key = 'upper'
                value = self.threshold + 1
            else:
                key = 'lower'
                value = self.threshold - 1
            self.test_case.assertEqual(
                invocations[-1][key], value,
                'key: {}, expected: {}'.format(key, value))

    # current_executions starts at 2: deployment env creation + install
    # (presumably — TODO confirm against the deploy helper)
    tester = Tester(test_case=self, threshold=100, current_executions=2,
                    current_invocations=0)
    for _ in range(2):
        # only the first publication on each side should trigger
        tester.publish_above_threshold(deployment.id, do_assert=True)
        tester.publish_above_threshold(deployment.id, do_assert=False)
        tester.publish_below_threshold(deployment.id, do_assert=True)
        tester.publish_below_threshold(deployment.id, do_assert=False)
def test_illegal_non_graph_to_graph_mode(self):
    """Switching from non-graph to graph mode must raise at deploy."""
    if not self.do_get:
        # no need to run twice
        return
    with self.assertRaises(RuntimeError):
        deploy(resource('dsl/workflow_api.yaml'), self._testMethodName)
def test_modification_operations(self):
    """Deployment modification runs the expected lifecycle operations
    on the added instance and its relationships.

    Fix: the original asserted the 'preconfigure' count twice with two
    identical statements; the duplicate assertion is removed.
    """
    dsl_path = resource("dsl/deployment_modification_operations.yaml")
    deployment, _ = deploy(dsl_path)
    deployment_id = deployment.id
    execute_workflow('deployment_modification', deployment_id)
    invocations = self.get_plugin_data(
        'testmockoperations',
        deployment_id)['mock_operation_invocation']
    # one 'create' for the added instance
    self.assertEqual(
        1, len([i for i in invocations if i['operation'] == 'create']))
    # 'preconfigure' runs on both sides of the new relationship
    self.assertEqual(
        2, len([i for i in invocations
                if i['operation'] == 'preconfigure']))
    configure_invocations = [
        i for i in invocations if i['operation'] == 'configure'
    ]
    self.assertEqual(1, len(configure_invocations))
    self.assertEqual(1, len(configure_invocations[0]['target_ids']))
    start_invocations = [
        i for i in invocations if i['operation'] == 'start'
    ]
    self.assertEqual(1, len(start_invocations))
    self.assertEqual(2, len(start_invocations[0]['target_ids']))
def test_deploy_with_operation_executor_override(self):
    """An operation-level executor override causes the plugin to also
    be installed on the deployment worker (tracked here via the
    'agent' plugin data), and 'start' runs with the deployment worker
    as its target."""
    dsl_path = resource('dsl/operation_executor_override.yaml')
    deployment, _ = deploy(dsl_path)
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id
    )
    # the single non-host instance is the webserver
    webserver_nodes = filter(lambda node: 'host' not in node.node_id,
                             deployment_nodes)
    self.assertEquals(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    start_invocation = self.get_plugin_data(
        plugin_name='target_aware_mock_plugin',
        deployment_id=deployment.id
    )[webserver_node.id]['start']
    # 'start' must have executed with the deployment worker as target
    expected_start_invocation = {'target': deployment.id}
    self.assertEqual(expected_start_invocation, start_invocation)
    agent_data = self.get_plugin_data(
        plugin_name='agent',
        deployment_id=deployment.id
    )
    deployment_operations_worker_name = deployment.id
    # target_aware_mock_plugin should have been installed
    # on the deployment worker as well because 'start'
    # overrides the executor
    self.assertEqual(
        agent_data[
            deployment_operations_worker_name
        ]['target_aware_mock_plugin'],
        ['installed'])
    undeploy(deployment_id=deployment.id)
def test_delete_botched_deployment(self):
    """A deployment whose delete-environment step raises can still be
    deleted through the client (delete must not be blocked by the
    failing mock plugin)."""
    from testenv import testenv_instance
    storage_file_path = os.path.join(
        testenv_instance.plugins_storage_dir,
        'agent.json'
    )
    dsl_path = resource('dsl/basic.yaml')
    _id = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(_id)
    deployment_id = 'deployment_{0}'.format(_id)
    # prime the mock agent plugin to raise when this deployment's
    # environment is deleted
    data = {
        deployment_id: {'raise_exception_on_delete': True}
    }
    with open(storage_file_path, 'w') as f:
        json.dump(data, f)
    self.client.blueprints.upload(dsl_path, blueprint_id)
    self.client.deployments.create(blueprint_id, deployment_id)
    # wait for the (system) deployment-environment-creation execution
    execution = \
        self.client.executions.list(deployment_id,
                                    include_system_workflows=True)[0]
    wait_for_execution_to_end(execution)
    self.client.deployments.delete(deployment_id)
    try:
        self.client.deployments.get(deployment_id)
        self.fail("Expected deployment to be deleted")
    except CloudifyClientError as e:
        self.assertEquals(404, e.status_code)
def test_executions_pagination(self):
    """Create ten executions, then exercise the pagination helper."""
    deployment = deploy(resource('dsl/pagination.yaml'))
    for _ in range(5):
        execute_workflow('install', deployment.id)
        execute_workflow('uninstall', deployment.id)
    list_executions = partial(self.client.executions.list,
                              deployment_id=deployment.id)
    self._test_pagination(list_executions)
def test_uninstall_with_dependency_order(self):
    """Uninstall tears nodes down in reverse dependency order
    (node3 -> node2 -> node1) and unconfigures relationships against
    the innermost node."""
    dsl_path = resource(
        "dsl/uninstall_dependencies-order-with-three-nodes.yaml")
    deployment, _ = deploy(dsl_path)
    deployment_id = deployment.id
    undeploy(deployment_id)
    # Checking that uninstall wasn't called on the contained node
    states = self.get_plugin_data(plugin_name='testmockoperations',
                                  deployment_id=deployment_id)['state']
    node1_id = states[0]['id']
    node2_id = states[1]['id']
    node3_id = states[2]['id']
    unreachable_call_order = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment_id)['unreachable_call_order']
    self.assertEquals(3, len(unreachable_call_order))
    # reverse dependency order: node3 first, node1 last
    self.assertEquals(node3_id, unreachable_call_order[0]['id'])
    self.assertEquals(node2_id, unreachable_call_order[1]['id'])
    self.assertEquals(node1_id, unreachable_call_order[2]['id'])
    configurer_state = self.get_plugin_data(
        plugin_name='connection_configurer_mock',
        deployment_id=deployment_id)['state']
    self.assertEquals(2, len(configurer_state))
    self.assertTrue(
        configurer_state[0]['id'].startswith('contained_in_node2'))
    self.assertTrue(
        configurer_state[0]['related_id'].startswith('contained_in_node1'))
    self.assertTrue(
        configurer_state[1]['id'].startswith('containing_node'))
    self.assertTrue(
        configurer_state[1]['related_id'].startswith('contained_in_node1'))
def test_executions_pagination(self):
    """Generate ten executions and verify pagination over them."""
    deployment = deploy(resource('dsl/pagination.yaml'))
    for _ in range(5):
        for workflow_id in ('install', 'uninstall'):
            execute_workflow(workflow_id, deployment.id)
    self._test_pagination(partial(self.client.executions.list,
                                  deployment_id=deployment.id))
def test_cancel_on_wait_for_task_termination(self):
    """Cancel an execution while it waits for a task to terminate."""
    _, execution_id = deploy(resource('dsl/workflow_api.yaml'),
                             self._testMethodName,
                             parameters={'do_get': self.do_get},
                             wait_for_execution=False)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.STARTED)
    self.client.executions.cancel(execution_id)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.CANCELLED)
def _execute_and_cancel_execution(self, workflow_id, force=False,
                                  wait_for_termination=True,
                                  is_wait_for_asleep_node=True):
    """Start *workflow_id* on a fresh sleep-workflows deployment and
    cancel it.

    :param force: issue a force-cancel instead of a regular cancel
    :param wait_for_termination: block until the execution ends and
        return its refreshed state
    :param is_wait_for_asleep_node: poll (up to 30s) until the node
        instance reports 'asleep' before cancelling
    :return: (execution, deployment_id) tuple
    """
    dsl_path = resource('dsl/sleep_workflows.yaml')
    _id = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(_id)
    deployment_id = 'deployment_{0}'.format(_id)
    self.client.blueprints.upload(dsl_path, blueprint_id)
    self.client.deployments.create(blueprint_id, deployment_id)
    do_retries(verify_deployment_environment_creation_complete, 30,
               deployment_id=deployment_id)
    execution = self.client.executions.start(
        deployment_id, workflow_id)
    node_inst_id = self.client.node_instances.list(deployment_id)[0].id
    if is_wait_for_asleep_node:
        # poll until the sleep task has actually started
        for retry in range(30):
            if self.client.node_instances.get(
                    node_inst_id).state == 'asleep':
                break
            time.sleep(1)
        else:
            # for/else: no break means the node never went to sleep
            raise RuntimeError("Execution was expected to go"
                               " into 'sleeping' status")
    execution = self.client.executions.cancel(execution.id, force)
    # cancel returns immediately with an in-progress cancelling status
    expected_status = Execution.FORCE_CANCELLING if force else \
        Execution.CANCELLING
    self.assertEquals(expected_status, execution.status)
    if wait_for_termination:
        wait_for_execution_to_end(execution)
        execution = self.client.executions.get(execution.id)
    return execution, deployment_id
def test_delete_botched_deployment(self):
    """Deleting a deployment succeeds even when the mock agent plugin
    is configured to raise during deployment-environment deletion."""
    from testenv import testenv_instance
    storage_file_path = os.path.join(testenv_instance.plugins_storage_dir,
                                     'agent.json')
    dsl_path = resource('dsl/basic.yaml')
    _id = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(_id)
    deployment_id = 'deployment_{0}'.format(_id)
    # make the mock plugin raise on delete for this deployment
    data = {deployment_id: {'raise_exception_on_delete': True}}
    with open(storage_file_path, 'w') as f:
        json.dump(data, f)
    self.client.blueprints.upload(dsl_path, blueprint_id)
    self.client.deployments.create(blueprint_id, deployment_id)
    # wait for the system deployment-environment-creation execution
    execution = \
        self.client.executions.list(deployment_id,
                                    include_system_workflows=True)[0]
    wait_for_execution_to_end(execution)
    self.client.deployments.delete(deployment_id)
    try:
        self.client.deployments.get(deployment_id)
        self.fail("Expected deployment to be deleted")
    except CloudifyClientError as e:
        self.assertEquals(404, e.status_code)
def test_deploy_with_agent_worker_windows_3_2(self):
    """Full install/uninstall lifecycle with a Windows agent host:
    verifies the agent's state transitions, plugin installation on the
    host, and the mock plugin's operation order."""
    dsl_path = resource('dsl/with_agent_worker_windows_3_2.yaml')
    deployment, _ = deploy(dsl_path, timeout_seconds=500)
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id
    )
    # the single non-host instance is the webserver
    webserver_nodes = filter(lambda node: 'host' not in node.node_id,
                             deployment_nodes)
    self.assertEquals(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    invocations = self.get_plugin_data(
        plugin_name='mock_agent_plugin',
        deployment_id=deployment.id
    )[webserver_node.id]
    agent_installer_data = self.get_plugin_data(
        plugin_name='windows_agent_installer',
        deployment_id=deployment.id
    )
    # agent on host should have been started and restarted
    self.assertEqual(
        agent_installer_data[webserver_node.host_id]['states'],
        ['created', 'configured', 'started', 'restarted'])
    plugin_installer_data = self.get_plugin_data(
        plugin_name='windows_plugin_installer',
        deployment_id=deployment.id
    )
    self.assertEqual(
        plugin_installer_data[
            webserver_node.host_id
        ]['mock_agent_plugin'],
        ['installed'])
    expected_invocations = ['create', 'start']
    self.assertListEqual(invocations, expected_invocations)
    undeploy(deployment_id=deployment.id)
    invocations = self.get_plugin_data(
        plugin_name='mock_agent_plugin',
        deployment_id=deployment.id
    )[webserver_node.id]
    expected_invocations = ['create', 'start', 'stop', 'delete']
    self.assertListEqual(invocations, expected_invocations)
    # agent on host should have also
    # been stopped and uninstalled
    agent_installer_data = self.get_plugin_data(
        plugin_name='windows_agent_installer',
        deployment_id=deployment.id
    )
    self.assertEqual(
        agent_installer_data[webserver_node.host_id]['states'],
        ['created', 'configured', 'started', 'restarted',
         'stopped', 'deleted'])
def _make_archive_file(self, blueprint_path, write_mode='w'):
    """Create a tar archive of the directory containing *blueprint_path*.

    :param blueprint_path: resource path of the blueprint file
    :param write_mode: tarfile open mode, e.g. 'w' or 'w:gz'
    :return: filesystem path of the created archive

    Fix: ``tempfile.mkstemp()`` returns an OPEN os-level file
    descriptor; the original discarded it via ``mkstemp()[1]``,
    leaking one fd per archive. The fd is now closed explicitly.
    """
    dsl_path = resource(blueprint_path)
    blueprint_dir = os.path.dirname(dsl_path)
    fd, archive_location = tempfile.mkstemp()
    os.close(fd)  # we only need the path; tarfile reopens the file
    arcname = os.path.basename(blueprint_dir)
    with tarfile.open(archive_location, write_mode) as tar:
        tar.add(blueprint_dir, arcname=arcname)
    return archive_location
def test_cancel_on_task_retry_interval(self):
    """Cancel an execution while a task waits out its retry interval."""
    self.configure(retries=2, interval=1000000)
    _, execution_id = deploy(resource('dsl/workflow_api.yaml'),
                             self._testMethodName,
                             parameters={'do_get': self.do_get},
                             wait_for_execution=False)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.STARTED)
    self.client.executions.cancel(execution_id)
    self.wait_for_execution_status(execution_id,
                                   status=Execution.CANCELLED)
def setUp(self):
    """Upload a basic blueprint and create a deployment per test."""
    super(RestAPITest, self).setUp()
    self.node_id = 'webserver_host'
    self.blueprint_id = 'blueprint-' + str(uuid.uuid4())
    self.deployment_id = 'deployment-' + str(uuid.uuid4())
    self.client.blueprints.upload(resource('dsl/basic.yaml'),
                                  self.blueprint_id)
    self.client.deployments.create(self.blueprint_id,
                                   self.deployment_id)
def test_deployment_modifications_sort(self):
    """Create several modifications, then verify sort by deployment_id."""
    deployment = deploy(resource('dsl/sort.yaml'))
    for instance_count in range(2, 12):
        modification = self.client.deployment_modifications.start(
            deployment_id=deployment.id,
            nodes={'node': {'instances': instance_count}})
        self.client.deployment_modifications.finish(modification.id)
    self._test_sort('deployment_modifications', 'deployment_id')
def test_deployment_modifications_pagination(self):
    """Create ten deployment modifications and verify the pagination
    helper over their listing."""
    deployment = deploy(resource("dsl/pagination.yaml"))
    # each iteration scales 'node' to a new instance count, producing
    # one finished modification per loop
    for i in range(2, 12):
        modification = self.client.deployment_modifications.start(
            deployment_id=deployment.id,
            nodes={"node": {"instances": i}}
        )
        self.client.deployment_modifications.finish(modification.id)
    self._test_pagination(
        partial(self.client.deployment_modifications.list,
                deployment_id=deployment.id))
def test_cancel_on_wait_for_task_termination(self):
    """Cancelling while a task is awaited ends in CANCELLED."""
    parameters = {'do_get': self.do_get}
    _, eid = deploy(resource('dsl/workflow_api.yaml'),
                    self._testMethodName,
                    parameters=parameters,
                    wait_for_execution=False)
    self.wait_for_execution_status(eid, status=Execution.STARTED)
    self.client.executions.cancel(eid)
    self.wait_for_execution_status(eid, status=Execution.CANCELLED)
def test_deploy_multi_instance_many_different_hosts(self):
    """15 machines spread evenly (5 each) across three host types."""
    deployment, _ = deploy(
        resource("dsl/multi_instance_many_different_hosts.yaml"))
    machines = set(self.get_plugin_data(
        plugin_name="cloudmock",
        deployment_id=deployment.id)["machines"])
    self.assertEquals(15, len(machines))
    for host_prefix in ("host1", "host2", "host3"):
        self.assertEquals(
            5, sum(1 for m in machines if m.startswith(host_prefix)))
def test_cancel_on_task_retry_interval(self):
    """Cancelling during a very long retry interval must succeed."""
    self.configure(retries=2, interval=1000000)
    parameters = {'do_get': self.do_get}
    _, eid = deploy(resource('dsl/workflow_api.yaml'),
                    self._testMethodName,
                    parameters=parameters,
                    wait_for_execution=False)
    self.wait_for_execution_status(eid, status=Execution.STARTED)
    self.client.executions.cancel(eid)
    self.wait_for_execution_status(eid, status=Execution.CANCELLED)
def test_get_blueprint(self):
    """A deployed blueprint can be fetched back by its id."""
    blueprint_id = str(uuid.uuid4())
    deployment, _ = deploy(resource("dsl/basic.yaml"),
                           blueprint_id=blueprint_id)
    self.assertEqual(blueprint_id, deployment.blueprint_id)
    blueprint = self.client.blueprints.get(blueprint_id)
    self.assertEqual(blueprint_id, blueprint.id)
    # the parsed plan must have been stored alongside the blueprint
    self.assertTrue(len(blueprint['plan']) > 0)
def test_deployment_modifications_sort(self):
    """Start and finish ten modifications, then check sorting."""
    deployment = deploy(resource('dsl/sort.yaml'))
    for count in range(2, 12):
        nodes_spec = {'node': {'instances': count}}
        modification = self.client.deployment_modifications.start(
            deployment_id=deployment.id, nodes=nodes_spec)
        self.client.deployment_modifications.finish(modification.id)
    self._test_sort('deployment_modifications', 'deployment_id')
def test_script_mapping(self):
    """Script-plugin mapped operations run and record their data."""
    deployment, _ = deploy(resource('dsl/test_script_mapping.yaml'))
    execute_workflow('workflow', deployment.id)
    data = self.get_plugin_data(plugin_name='script',
                                deployment_id=deployment.id)
    self.assertEqual(data['op1_called_with_property'], 'op2_called')
    self.assertEqual(data['op2_prop'], 'op2_value')
def test_start_monitor_node_operation(self):
    """Exactly one 'start_monitor' invocation is recorded."""
    deployment, _ = deploy(
        resource("dsl/hardcoded_operation_properties.yaml"))
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['monitoring_operations_invocation']
    self.assertEqual(1, len(invocations))
    self.assertEqual('start_monitor', invocations[0]['operation'])
def launch_deployment(self, yaml_file, expected_num_of_node_instances=1):
    """Deploy *yaml_file* and wait for its initial workflows."""
    deployment, _ = deploy(resource(yaml_file))
    self.deployment = deployment
    instances = self.client.node_instances.list(deployment.id)
    self.node_instances = instances
    self.assertEqual(expected_num_of_node_instances, len(instances))
    self.wait_for_executions(self.NUM_OF_INITIAL_WORKFLOWS)
def test_execute_operation_failure(self):
    """Deploying the blueprint must raise because an operation fails.

    Fix: the original called ``self.fail('expected exception')`` inside
    the ``try`` block guarded by a broad ``except Exception`` — the
    AssertionError raised by ``fail`` was swallowed, so the test passed
    even when deploy succeeded. The failure path now lives in the
    ``else`` clause so only the deploy call itself is guarded.
    """
    deployment_id = str(uuid.uuid4())
    dsl_path = resource("dsl/basic.yaml")
    try:
        deploy(dsl_path, deployment_id=deployment_id)
    except Exception as e:
        # expected path: log the error message when one is present
        if e.message:
            self.logger.info(e.message)
    else:
        self.fail('expected exception')
def _test_custom_workflow(self, workflow, error_expected=False):
    """Run *workflow* on the task-not-exist blueprint and assert it
    fails (with the agent-alive message) exactly when expected.

    Note: ``self.fail`` raises AssertionError, which is not caught by
    the ``except RuntimeError`` below, so a missing expected error
    still fails the test.
    """
    deployment = deploy(resource("dsl/basic_task_not_exist.yaml"))
    try:
        execute_workflow(workflow, deployment.id)
        if error_expected:
            self.fail('RuntimeError expected')
    except RuntimeError as e:
        if not error_expected:
            self.fail('Success expected. error message: {0}'.format(e))
        self.assertIn(self.AGENT_ALIVE_FAIL, str(e))
def test_start_monitor_node_operation(self):
    """The hardcoded-properties blueprint triggers one monitor start."""
    deployment, _ = deploy(
        resource("dsl/hardcoded_operation_properties.yaml"))
    plugin_data = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)
    invocations = plugin_data['monitoring_operations_invocation']
    self.assertEqual(1, len(invocations))
    invocation = invocations[0]
    self.assertEqual('start_monitor', invocation['operation'])
def test_failed_uninstall_task(self):
    """All machines are removed even when a stop operation errors."""
    deployment, _ = deploy(resource('dsl/basic_stop_error.yaml'))
    undeploy(deployment.id)
    machines = self.get_plugin_data(
        plugin_name='cloudmock',
        deployment_id=deployment.id)['machines']
    self.assertEquals(0, len(machines))
def test_deployment_workflows(self):
    """The deployment exposes exactly the three mapped workflows."""
    deployment, _ = deploy(resource("dsl/custom_workflow_mapping.yaml"))
    workflows = self.client.deployments.get(deployment.id).workflows
    self.assertEqual(3, len(workflows))
    workflow_names = [wf.name for wf in workflows]
    for expected in ('uninstall', 'install', 'custom'):
        self.assertIn(expected, workflow_names)
def test_node_operation_different_inputs(self):
    """
    Tests storing different nodes with different structured inputs
    for the same operation.
    """
    blueprint_id = str(uuid.uuid4())
    blueprint = self.client.blueprints.upload(
        resource("dsl/two_nodes_different_inputs.yaml"), blueprint_id)
    deployment_id = str(uuid.uuid4())
    # creation succeeding is the assertion: storing both nodes'
    # differently-structured operation inputs must not raise
    self.client.deployments.create(blueprint.id, deployment_id)
def test_plugin_get_resource(self):
    """get_resource from within a plugin returns blueprint resources,
    and a caller-supplied filepath controls where the copy is saved."""
    dsl_path = resource("dsl/get_resource_in_plugin.yaml")
    deployment, _ = deploy(dsl_path)
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['get_resource_operation_invocation']
    self.assertEquals(1, len(invocations))
    invocation = invocations[0]
    with open(resource("dsl/basic.yaml")) as f:
        basic_data = f.read()
    # checking the resources are the correct data
    self.assertEquals(basic_data, invocation['res1_data'])
    self.assertEquals(basic_data, invocation['res2_data'])
    # checking the custom filepath provided is indeed where the second
    # resource was saved
    self.assertEquals(invocation['custom_filepath'],
                      invocation['res2_path'])