def test_component_creation_with_blueprint_id(self):
    """A Component referencing an existing blueprint by id exposes its
    capabilities, and undeploying the root removes the component
    deployment too.
    """
    # Blueprint used by the component; it exposes a single capability.
    component_blueprint = """
tosca_definitions_version: cloudify_dsl_1_3

imports:
  - cloudify/types/types.yaml

capabilities:
  test:
    value: 1
"""
    blueprint_path = self.make_yaml_file(component_blueprint)
    self.client.blueprints.upload(blueprint_path,
                                  entity_id=self.basic_blueprint_id)
    wait_for_blueprint_upload(self.basic_blueprint_id, self.client, True)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    dsl_path = resource('dsl/component_with_blueprint_id.yaml')
    self.deploy_application(dsl_path, deployment_id=deployment_id)
    # The component's capability must be visible from the root deployment.
    self._validate_component_capabilities(deployment_id, {'test': 1})
    # The component deployment itself was created.
    self.assertTrue(self.client.deployments.get(self.component_name))

    self.undeploy_application(deployment_id, is_delete_deployment=True)
    # Both the component deployment and the root deployment are gone.
    self.assertRaises(CloudifyClientError,
                      self.client.deployments.get,
                      self.component_name)
    self.assertRaises(CloudifyClientError,
                      self.client.deployments.get,
                      deployment_id)
def test_3_layer_cascading_workflow(self):
    """A cascading workflow started on the root deployment runs once per
    layer in a 3-deep Component chain.
    """
    # Layer 3: innermost blueprint, defines the cascading workflow.
    layer_3_path = self.make_yaml_file(
        self.component_blueprint_with_nothing_workflow)
    self.client.blueprints.upload(layer_3_path, entity_id='layer_3')
    wait_for_blueprint_upload('layer_3', self.client)

    # Layer 2: wraps layer 3 as a Component.
    layer_2 = self.generate_root_blueprint_with_component(
        'layer_3', 'other_component')
    layer_2_path = self.make_yaml_file(layer_2)
    self.client.blueprints.upload(layer_2_path, entity_id='layer_2')
    wait_for_blueprint_upload('layer_2', self.client)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    # Root deployment: wraps layer 2 as a Component.
    main_blueprint = self.generate_root_blueprint_with_component(
        'layer_2', 'component')
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    self.deploy_application(main_blueprint_path,
                            deployment_id=deployment_id,
                            timeout_seconds=120)

    self.client.executions.start(deployment_id, 'nothing_workflow')
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.wait_for_execution_to_end(execution)
    # Re-fetch so the statuses reflect the finished executions.
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.assertEqual(Execution.TERMINATED, execution.status)
    # One execution per layer.
    self.assertEqual(len(executions), 3)
def test_cascading_workflow_with_parameters(self):
    """Parameters passed to a cascading workflow reach every execution.

    Bug fix: the final assertion used ``assertTrue(execution.parameters,
    parameters)`` — the second argument of ``assertTrue`` is only the
    failure *message*, so the parameters were never actually compared.
    Use ``assertEqual`` to really check them, inside the loop so every
    cascaded execution is verified.
    """
    basic_blueprint_path = self.make_yaml_file(
        self.component_blueprint_with_nothing_workflow)
    self.client.blueprints.upload(basic_blueprint_path,
                                  entity_id='workflow')
    wait_for_blueprint_upload('workflow', self.client)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    main_blueprint = self.generate_root_blueprint_with_component()
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    self.deploy_application(main_blueprint_path,
                            deployment_id=deployment_id)

    parameters = {'param': 1}
    # 'param' is not declared on the workflow, hence
    # allow_custom_parameters.
    self.client.executions.start(deployment_id,
                                 'nothing_workflow',
                                 parameters=parameters,
                                 allow_custom_parameters=True)
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.wait_for_execution_to_end(execution)
    # Re-fetch so the statuses reflect the finished executions.
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.assertEqual(Execution.TERMINATED, execution.status)
        # Fixed: previously assertTrue(x, msg) — no comparison at all.
        self.assertEqual(parameters, execution.parameters)
def test_many_deployments_are_updated(self):
    """A plugins update upgrades the plugin (V1.0 -> V2.0) across all
    deployments created from the same base blueprint.
    """
    self.setup_deployment_ids = ['d{0}'.format(uuid.uuid4())
                                 for _ in range(5)]
    self.setup_node_id = 'node'
    self.plugin_name = 'version_aware'
    self.client.blueprints.upload(
        path=self._get_dsl_blueprint_path(''),
        entity_id=self.base_blueprint_id)
    wait_for_blueprint_upload(self.base_blueprint_id, self.client)
    blueprint = self.client.blueprints.get(self.base_blueprint_id)
    # Create and install several deployments from the same blueprint.
    for dep_id in self.setup_deployment_ids:
        self.client.deployments.create(blueprint.id, dep_id)
        wait_for_deployment_creation_to_complete(
            self.env.container_id, dep_id, self.client
        )
        self.execute_workflow('install', dep_id)
    self._upload_v_2_plugin()

    # Execute base (V 1.0) workflows
    for dep_id in self.setup_deployment_ids:
        self.setup_deployment_id = dep_id
        self._execute_workflows()
        self._assert_host_values(self.versions[0])

    plugins_update = self._perform_plugins_update()
    self.assertEqual(plugins_update.state, STATES.SUCCESSFUL)

    # Execute mod (V 2.0) workflows
    for dep_id in self.setup_deployment_ids:
        self.setup_deployment_id = dep_id
        self._execute_workflows()
        self._assert_host_values(self.versions[1])
def test_get_workflow_parameters(self):
    """Workflow parameter schemas declared in the blueprint are exposed
    on the deployment's workflow listing.
    """
    blueprint_path = resource('dsl/workflow_parameters.yaml')
    unique = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(unique)
    deployment_id = 'deployment_{0}'.format(unique)
    self.client.blueprints.upload(blueprint_path, blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client)
    self.client.deployments.create(blueprint_id, deployment_id,
                                   skip_plugins_validation=True)

    workflows = self.client.deployments.get(deployment_id).workflows
    target_workflow = next(wf for wf in workflows
                           if wf.name == 'another_execute_operation')
    expected_params = {
        'node_id': {'default': 'test_node'},
        'operation': {},
        'properties': {
            'default': {
                'key': 'test_key',
                'value': 'test_value',
            },
        },
    }
    self.assertEqual(expected_params, target_workflow.parameters)
def test_uploading_different_version_plugin_than_existing(self):
    """Uploading a second version of an already-present plugin keeps
    both; undeploying removes the blueprint-bundled one.

    Bug fix: the original assertions used ``assertTrue(x, msg)`` — the
    second argument is only the failure message, so nothing was ever
    compared — and checked ``package_version`` against a package *name*.
    Both listed plugins are versions of the same package, so compare
    ``package_name`` with a real equality assertion.
    """
    mock_id = upload_mock_plugin(self.client,
                                 self.TEST_PACKAGE_NAME,
                                 self.TEST_PACKAGE_VERSION)['id']
    self.wait_for_all_executions_to_end()
    basic_blueprint_path = resource('dsl/empty_blueprint.yaml')
    self.client.blueprints.upload(basic_blueprint_path,
                                  entity_id=self.basic_blueprint_id)
    wait_for_blueprint_upload(self.basic_blueprint_id, self.client, True)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    # Blueprint pulls in a different (2.0) version of the same plugin.
    main_blueprint = self.test_blueprint.format(
        'https://cloudify-tests-files.s3-eu-west-1.amazonaws.com/plugins',
        'cloudify_script_plugin/2_0/'
        'cloudify_script_plugin-2.0-py27-none-any.wgn',
        'cloudify_script_plugin/2_0/plugin.yaml')
    blueprint_path = self.make_yaml_file(main_blueprint)
    self.deploy_application(blueprint_path, deployment_id=deployment_id)

    plugins_list = self.client.plugins.list()
    self.assertEqual(len(plugins_list), 2)
    # Fixed: assertTrue(x, msg) never compared; also 'package_version'
    # was checked against a package name.
    for plugin in plugins_list:
        self.assertEqual(plugin['package_name'], self.TEST_PACKAGE_NAME)

    self.undeploy_application(deployment_id)
    # Only the plugin uploaded outside the deployment remains.
    self.assertEqual(len(self.client.plugins.list()), 1)
    self.client.plugins.delete(mock_id)
def test_scaled_relationships(self):
    """In a scaled group, each scaled instance's install subgraph must
    depend on the create operations of its own related instance.
    """
    deployment_id = 'd{0}'.format(uuid.uuid4())
    # Scale the group containing both nodes to two instances.
    main_blueprint = self.generate_blueprint('create') + """
groups:
  group1:
    members: [node, depends_on_operation_node]

policies:
  policy:
    type: cloudify.policies.scaling
    targets: [group1]
    properties:
      default_instances: 2
"""
    base_blueprint_path = utils.get_resource('dsl/mock_workflows.yaml')
    self.client.blueprints.upload(base_blueprint_path, 'mock_workflows')
    utils.wait_for_blueprint_upload('mock_workflows', self.client)
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    _, execution_id = self.deploy_application(
        main_blueprint_path, deployment_id=deployment_id)

    task_graphs = self.client.tasks_graphs.list(execution_id, 'install')
    # (operation name, node instance id) -> subgraph details
    operations_info = {}
    # operation id -> dependencies / info (all stored operations)
    operations_id = {}
    for graph in task_graphs:
        operations = self.client.operations.list(graph.id)
        for op in operations:
            operations_id[op.id] = {}
            operations_id[op.id]['dependencies'] = op.dependencies
            operations_id[op.id]['info'] = op.info
            # Only remote-task operations carry a __cloudify_context;
            # skip the others (e.g. subgraph placeholder tasks).
            try:
                cloudify_context = op.parameters['task_kwargs']['kwargs'][
                    '__cloudify_context']
            except KeyError:
                continue
            op_name = cloudify_context['operation']['name']
            node_id = cloudify_context['node_id']
            operations_info[(op_name, node_id)] = {}
            operations_info[(op_name, node_id)]['containing_subgraph']\
                = op.containing_subgraph
            operations_info[(op_name, node_id)]['op_name'] = op_name

    # Subgraphs containing the dependent node's configure operation —
    # expect one per scaled instance.
    install_subgraph_ids = [
        v['containing_subgraph']
        for (__, node), v in operations_info.items()
        if ('depends_on_operation_node' in node
            and v['op_name'] == 'cloudify.interfaces.lifecycle.configure')
    ]
    self.assertEqual(len(install_subgraph_ids), 2)
    # Each such subgraph must depend on the related instance's create
    # tasks (their recorded `info` messages).
    for install_id in install_subgraph_ids:
        next_tasks_info = [
            operations_id[dep]['info']
            for dep in operations_id[install_id]['dependencies']
        ]
        self.assertCountEqual(['Node instance created', 'created'],
                              next_tasks_info)
def test_workflow_parameters_pass_from_blueprint(self):
    """Default workflow parameters declared in the blueprint reach the
    operation when the workflow is started without overrides.
    """
    dsl_path = resource('dsl/workflow_parameters.yaml')
    _id = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(_id)
    deployment_id = 'deployment_{0}'.format(_id)
    self.client.blueprints.upload(dsl_path, blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client)
    self.client.deployments.create(blueprint_id, deployment_id,
                                   skip_plugins_validation=True)
    do_retries(verify_deployment_env_created,
               30,
               container_id=self.env.container_id,
               client=self.client,
               deployment_id=deployment_id)
    execution = self.client.executions.start(deployment_id,
                                             'custom_execute_operation')
    self.wait_for_execution_to_end(execution)

    node_id = self.client.node_instances.list(
        deployment_id=deployment_id)[0].id
    node_instance = self.client.node_instances.get(node_id)
    invocations = node_instance.runtime_properties[
        'mock_operation_invocation']
    # The operation ran exactly once, with the blueprint's default
    # properties.
    self.assertEqual(1, len(invocations))
    self.assertDictEqual(invocations[0], {'test_key': 'test_value'})
def test_remove_workflow(self):
    """A deployment update that removes a workflow makes it unavailable.

    Fixes two unittest API problems: ``assertRaisesRegexp`` is a
    deprecated alias (removed in Python 3.12), and the ``callable_obj``
    keyword was removed from unittest in Python 3.3, so passing it
    raises ``TypeError`` instead of running the check — the callable
    must be passed positionally.
    """
    workflow_id = 'my_custom_workflow'
    deployment, modified_bp_path = \
        self._deploy_and_get_modified_bp_path('remove_workflow')
    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    dep_update = \
        self.client.deployment_updates.update_with_existing_blueprint(
            deployment.id, BLUEPRINT_ID)

    # assert that 'update' workflow was executed
    self._wait_for_execution_to_terminate(deployment.id, 'update')
    self._wait_for_successful_state(dep_update.id)

    # Starting the removed workflow must now fail.
    self.assertRaisesRegex(
        CloudifyClientError,
        'Workflow {0} does not exist in deployment {1}'.format(
            workflow_id, deployment.id),
        self.client.executions.start,
        deployment_id=deployment.id,
        workflow_id=workflow_id,
        parameters={'node_id': 'site1'})

    deployment = self.client.deployments.get(dep_update.deployment_id)
    self.assertNotIn('my_custom_workflow',
                     [w['name'] for w in deployment.workflows])
def test_success_deploy_namespaced_blueprint_with_scripts(self):
    """A blueprint imported under a namespace still resolves its script
    resources: both the script task and agent creation succeed exactly
    once during install.
    """
    basic_blueprint_path = \
        resource('dsl/agent_tests/blueprint_with_scripts.yaml')
    blueprint_id = 'imported_scripts'
    self.client.blueprints.upload(basic_blueprint_path,
                                  entity_id=blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    dsl_path = resource('dsl/agent_tests/blueprints/'
                        'blueprint_with_namespaced_blueprint_import.yaml')
    _, execution_id = self.deploy_application(
        dsl_path, deployment_id=deployment_id)

    events = self.client.events.list(execution_id=execution_id,
                                     sort='timestamp')
    # Exactly one successful script-runner task event is expected.
    script_success_msg = "Task succeeded 'script_runner.tasks.run'"
    script_success_events = [
        event['message'] for event in events
        if script_success_msg == event['message']
    ]
    self.assertEqual(len(script_success_events), 1)
    # Exactly one agent-created event is expected.
    agent_success_msg = 'Agent created'
    agent_success_events = [
        event['message'] for event in events
        if agent_success_msg == event['message']
    ]
    self.assertEqual(len(agent_success_events), 1)
def test_remove_property(self):
    """A deployment update that removes a node property deletes exactly
    that property and nothing else.

    Bug fix: ``_assert_equal_entity_dicts`` was called with a single
    node (``modified_node``) in the position where the node mapping
    (``modified_nodes``) is expected — the first argument is the
    ``base_nodes`` mapping, so the second must be the matching
    ``modified_nodes`` mapping.
    """
    deployment, modified_bp_path = \
        self._deploy_and_get_modified_bp_path('remove_property')

    node_mapping = {'affected_node': 'site1'}
    base_nodes, base_node_instances = \
        self._map_node_and_node_instances(deployment.id, node_mapping)
    base_node = base_nodes['affected_node'][0]

    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    dep_update = \
        self.client.deployment_updates.update_with_existing_blueprint(
            deployment.id, BLUEPRINT_ID)

    # wait for 'update' workflow to finish
    self._wait_for_execution_to_terminate(deployment.id, 'update')
    self._wait_for_successful_state(dep_update.id)

    modified_nodes, modified_node_instances = \
        self._map_node_and_node_instances(deployment.id, node_mapping)
    modified_node = modified_nodes['affected_node'][0]

    # The removed property is gone.
    removed_property = modified_node['properties'].get('prop2')
    self.assertIsNone(removed_property)
    # assert nothing else changed
    self._assert_equal_dicts(base_node['properties'],
                             modified_node['properties'],
                             excluded_items=['prop2'])
    # Fixed: pass the modified node *mapping*, matching base_nodes.
    self._assert_equal_entity_dicts(base_nodes,
                                    modified_nodes,
                                    'affected_node',
                                    excluded_items=['properties'])
def _execute_from_resource(self, workflow_id, workflow_params=None,
                           resource_file=None):
    """Upload the given blueprint resource, create a deployment from it
    and start ``workflow_id`` on that deployment.

    Returns an (execution, node instance id, deployment id) triple.
    """
    blueprint_path = resource(resource_file)
    suffix = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(suffix)
    deployment_id = 'deployment_{0}'.format(suffix)

    self.client.blueprints.upload(blueprint_path, blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client, True)
    self.client.deployments.create(blueprint_id, deployment_id,
                                   skip_plugins_validation=True)
    # Wait until the deployment environment exists before starting
    # the workflow.
    do_retries(verify_deployment_env_created,
               30,
               container_id=self.env.container_id,
               deployment_id=deployment_id,
               client=self.client)

    execution = self.client.executions.start(deployment_id,
                                             workflow_id,
                                             parameters=workflow_params)
    instances = self.client.node_instances.list(
        deployment_id=deployment_id)
    return execution, instances[0].id, deployment_id
def test_add_workflow(self):
    """A deployment update that adds a workflow makes it executable on
    the existing deployment.
    """
    deployment, modified_bp_path = \
        self._deploy_and_get_modified_bp_path('add_workflow')

    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    dep_update = \
        self.client.deployment_updates.update_with_existing_blueprint(
            deployment.id, BLUEPRINT_ID)

    # assert that 'update' workflow was executed
    self._wait_for_execution_to_terminate(deployment.id, 'update')

    # Run the newly added workflow; presumably it scales site1 by
    # `delta` (3 instances asserted below) — confirm against the
    # 'add_workflow' blueprint.
    self.client.executions.start(dep_update.deployment_id,
                                 workflow_id='my_custom_workflow',
                                 parameters={
                                     'node_id': 'site1',
                                     'delta': 2
                                 })
    self._wait_for_execution_to_terminate(deployment.id,
                                          'my_custom_workflow')
    affected_node = self.client.node_instances.list(
        deployment_id=dep_update.deployment_id,
        node_id='site1')
    self.assertEqual(len(affected_node), 3)
    deployment = self.client.deployments.get(dep_update.deployment_id)
    self.assertIn('my_custom_workflow',
                  [w['name'] for w in deployment.workflows])
def upload_blueprint_resource(self, dsl_resource_path, blueprint_id,
                              client=None):
    """Upload the blueprint at ``dsl_resource_path`` as ``blueprint_id``
    and wait for the upload to complete.

    Bug fix: the wait step used ``self.client`` even when an explicit
    ``client`` was passed, so with a non-default client (e.g. another
    tenant) the wait polled the wrong client. Use the resolved
    ``client`` consistently.
    """
    client = client or self.client
    blueprint = get_resource(dsl_resource_path)
    client.blueprints.upload(blueprint, entity_id=blueprint_id)
    wait_for_blueprint_upload(blueprint_id, client, True)
def test_add_remove_and_modify_relationship(self):
    """
    site0 relationships:
    i  |   base  | modification | comment
    -------------------------------------------------
    0. |  site1  |    site6     | new site (and removed site1)
    1. |  site2  |    site4     | moved site (and removed site2)
    2. |  site3  |    site2B    | new site
    3. |  site4  |    site3     | moved site
    4. |  site5  |      -       | remove site5

    :return:
    """
    deployment, modified_bp_path = self._deploy_and_get_modified_bp_path(
        'add_remove_and_modify_relationship')

    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    dep_update = \
        self.client.deployment_updates.update_with_existing_blueprint(
            deployment.id, BLUEPRINT_ID)

    self._wait_for_execution_to_terminate(deployment.id, 'update')
    self._wait_for_successful_state(dep_update.id)

    node_mapping = {'source': 'site0'}
    modified_nodes, modified_node_instances = \
        self._map_node_and_node_instances(deployment.id, node_mapping)
    modified_node = modified_nodes['source'][0]
    modified_node_instance = modified_node_instances['source'][0]

    # Assert relationship order, on both the node and its instance.
    expected_targets = ['site6', 'site4', 'site2B', 'site3']
    for position, expected in enumerate(expected_targets):
        self.assertEqual(
            modified_node['relationships'][position]['target_id'],
            expected)
        self.assertEqual(
            modified_node_instance[
                'relationships'][position]['target_name'],
            expected)

    # Assert all operation were executed
    # Pre update:
    # 1. establish site0->site3: source_ops_counter=1
    # 2. establish site0->site4: source_ops_counter=2
    # 3. establish site0->site5: source_ops_counter=3
    # Post update:
    # 5. unlink site0->site1: source_ops_counter=4
    # 6. unlink site0->site2: source_ops_counter=5
    # 7. establish site0->site6: source_ops_counter=6
    # 8. establish site0->site2B: source_ops_counter=7
    self._assertDictContainsSubset(
        {'source_ops_counter': '7'},
        modified_node_instance['runtime_properties']
    )
def test_cascading_workflow_stopped_in_the_path(self):
    """
    The user can define that the cascading workflow in the downstream
    Components is not cascading anymore.
    """
    # Layer 3: innermost blueprint, defines the cascading workflow.
    layer_3_path = self.make_yaml_file(
        self.component_blueprint_with_nothing_workflow)
    self.client.blueprints.upload(layer_3_path, entity_id='layer_3')
    wait_for_blueprint_upload('layer_3', self.client)

    # Layer 2 overrides the workflow with is_cascading: false, so the
    # cascade stops here and does not reach layer 3.
    layer_2 = """
tosca_definitions_version: cloudify_dsl_1_3

imports:
  - cloudify/types/types.yaml
  - plugin:mock_workflows

node_templates:
  component_node:
    type: cloudify.nodes.Component
    properties:
      resource_config:
        blueprint:
          external_resource: true
          id: layer_3
        deployment:
          id: other_component

workflows:
  nothing_workflow:
    mapping: mock_workflows.mock_workflows.workflows.do_nothing
    is_cascading: false
"""
    layer_2_path = self.make_yaml_file(layer_2)
    self.client.blueprints.upload(layer_2_path, entity_id='layer_2')
    wait_for_blueprint_upload('layer_2', self.client)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    main_blueprint = self.generate_root_blueprint_with_component(
        'layer_2', 'component')
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    self.deploy_application(main_blueprint_path,
                            deployment_id=deployment_id,
                            timeout_seconds=120)

    self.client.executions.start(deployment_id, 'nothing_workflow')
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.wait_for_execution_to_end(execution)
    # Re-fetch so the statuses reflect the finished executions.
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.assertEqual(Execution.TERMINATED, execution.status)
    # Only the root and layer 2 ran; layer 3 was not reached.
    self.assertEqual(len(executions), 2)
def _start_a_workflow(self):
    """Upload a basic blueprint, create a deployment from it, and return
    the deployment id once its environment is ready.
    """
    # Start the create deployment workflow
    blueprint_path = utils.get_resource('dsl/basic.yaml')
    entity_id = 'basic_{}'.format(uuid.uuid4())
    blueprint_id = deployment_id = entity_id
    self.client.blueprints.upload(blueprint_path, blueprint_id)
    utils.wait_for_blueprint_upload(blueprint_id, self.client)
    self.client.deployments.create(blueprint_id, deployment_id)
    utils.wait_for_deployment_creation_to_complete(
        self.env.container_id, deployment_id, self.client)
    return deployment_id
def _upload_blueprints_and_deploy_base(self):
    """Deploy the base blueprint, and upload (without deploying) the
    modified blueprint used later for the update.
    """
    self.deploy_application(
        dsl_path=self._get_dsl_blueprint_path(self.base_name),
        blueprint_id=self.base_blueprint_id,
        deployment_id=self.setup_deployment_id
    )
    # The modified blueprint is only uploaded; the update step deploys
    # from it later.
    self.client.blueprints.upload(
        path=self._get_dsl_blueprint_path(self.mod_name),
        entity_id=self.mod_blueprint_id
    )
    wait_for_blueprint_upload(self.mod_blueprint_id, self.client)
def test_blueprint_upload_batch_async(self):
    """Several asynchronous blueprint uploads all end in state UPLOADED
    with the main file recorded and a parsed plan.
    """
    blueprint_filename = 'empty_blueprint.yaml'
    blueprint_ids = ['bp_{}'.format(n) for n in range(5)]
    # Kick off all uploads without waiting on any of them.
    for blueprint_id in blueprint_ids:
        self.client.blueprints.upload(
            resource('dsl/{}'.format(blueprint_filename)),
            entity_id=blueprint_id,
            async_upload=True)
    # Now wait for each and verify its final state.
    for blueprint_id in blueprint_ids:
        wait_for_blueprint_upload(blueprint_id, self.client, False)
        blueprint = self.client.blueprints.get(blueprint_id)
        self.assertEqual(blueprint['state'],
                         BlueprintUploadState.UPLOADED)
        self.assertEqual(blueprint.main_file_name, blueprint_filename)
        self.assertNotEqual(blueprint.plan, None)
def test_download_blueprint(self):
    """A downloaded blueprint archive contains the originally uploaded
    blueprint file, byte for byte.
    """
    self.client.blueprints.upload(self.original_blueprint_file,
                                  self.blueprint_id)
    wait_for_blueprint_upload(self.blueprint_id, self.client)
    self.client.blueprints.download(
        self.blueprint_id, output_file=self.downloaded_archive_path)
    self.assertTrue(os.path.exists(self.downloaded_archive_path))

    self._extract_tar_file()
    downloaded_blueprint_file = os.path.join(
        self.downloaded_extracted_dir, 'blueprint/blueprint.yaml')
    self.assertTrue(os.path.exists(downloaded_blueprint_file))
    # The round-tripped file must be identical to the uploaded one.
    self.assertTrue(
        filecmp.cmp(self.original_blueprint_file,
                    downloaded_blueprint_file))
def test_node_operation_different_inputs(self):
    """
    Tests storing different nodes with different structured inputs for
    the same operation.
    """
    blueprint_id = 'b{0}'.format(uuid.uuid4())
    blueprint_path = resource("dsl/two_nodes_different_inputs.yaml")
    self.client.blueprints.upload(blueprint_path, blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client)
    uploaded_blueprint = self.client.blueprints.get(blueprint_id)
    deployment_id = 'd{0}'.format(uuid.uuid4())
    # Creating the deployment is the assertion: it must not fail while
    # storing the per-node operation inputs.
    self.client.deployments.create(uploaded_blueprint.id,
                                   deployment_id,
                                   skip_plugins_validation=True)
def test_execution_parameters(self):
    """Custom execution parameters override the workflow's declared
    defaults and are merged into the stored execution parameters.
    """
    dsl_path = resource('dsl/workflow_parameters.yaml')
    _id = uuid.uuid1()
    blueprint_id = 'blueprint_{0}'.format(_id)
    deployment_id = 'deployment_{0}'.format(_id)
    self.client.blueprints.upload(dsl_path, blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client, True)
    self.client.deployments.create(blueprint_id, deployment_id,
                                   skip_plugins_validation=True)
    do_retries(verify_deployment_env_created,
               60,
               container_id=self.env.container_id,
               deployment_id=deployment_id,
               client=self.client)
    # 'custom-parameter' is not declared on the workflow, hence
    # allow_custom_parameters below.
    execution_parameters = {
        'operation': 'test_interface.operation',
        'properties': {
            'key': 'different-key',
            'value': 'different-value'
        },
        'custom-parameter': "doesn't matter"
    }
    execution = self.client.executions.start(
        deployment_id,
        'another_execute_operation',
        parameters=execution_parameters,
        allow_custom_parameters=True)
    self.wait_for_execution_to_end(execution)
    invocations = self.get_runtime_property(
        deployment_id, 'mock_operation_invocation')[0]
    # The operation ran once, with the overridden properties.
    self.assertEqual(1, len(invocations))
    self.assertDictEqual(invocations[0],
                         {'different-key': 'different-value'})

    # checking for execution parameters - expecting there to be a merge
    # with overrides with workflow parameters.
    expected_params = {
        'node_id': 'test_node',
        'operation': 'test_interface.operation',
        'properties': {
            'key': 'different-key',
            'value': 'different-value'
        },
        'custom-parameter': "doesn't matter"
    }
    self.assertEqual(expected_params, execution.parameters)
def test_uninstall_execution_order(self):
    """During the update's uninstall phase, site2's operations must not
    start before site3's are done, since site3 is connected_to site2.
    """
    deployment, modified_bp_path = \
        self._deploy_and_get_modified_bp_path('uninstall_execution_order')
    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    self.client.deployment_updates.update_with_existing_blueprint(
        deployment.id, BLUEPRINT_ID)

    self._wait_for_execution_to_terminate(deployment.id, 'update')

    # 'is_op_started' is set by the mock operations; it must be falsy
    # on site1's instance if ordering was respected.
    self.assertFalse(
        self.client.node_instances.list(
            node_id='site1').items[0].runtime_properties['is_op_started'],
        'Site2 operations were executed '
        'before/simultaneously with Site3 operations, '
        'although site3 connected_to site2')
def test_remove_output(self):
    """A deployment update that removes an output deletes it from the
    deployment's outputs.
    """
    deployment, modified_bp_path = \
        self._deploy_and_get_modified_bp_path('remove_output')

    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    dep_update = \
        self.client.deployment_updates.update_with_existing_blueprint(
            deployment.id, BLUEPRINT_ID)

    # assert that 'update' workflow was executed
    self._wait_for_execution_to_terminate(deployment.id, 'update')
    self._wait_for_successful_state(dep_update.id)

    deployment = self.client.deployments.get(dep_update.deployment_id)
    self.assertNotIn('custom_output', deployment.outputs)
def test_deployment_inputs(self):
    """Blueprint inputs appear on the parsed plan and are propagated to
    deployments created from the blueprint.
    """
    blueprint_id = 'b{0}'.format(uuid.uuid4())
    self.client.blueprints.upload(resource("dsl/basic.yaml"),
                                  blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client)
    blueprint = self.client.blueprints.get(blueprint_id)

    # The plan exposes the single declared input with its metadata.
    inputs = blueprint.plan['inputs']
    self.assertEqual(1, len(inputs))
    self.assertIn('install_agent', inputs)
    self.assertFalse(inputs['install_agent']['default'])
    self.assertGreater(len(inputs['install_agent']['description']), 0)

    # A deployment created from the blueprint inherits the input value.
    deployment_id = 'd{0}'.format(uuid.uuid4())
    deployment = self.client.deployments.create(
        blueprint.id, deployment_id, skip_plugins_validation=True)
    self.assertEqual(1, len(deployment.inputs))
    self.assertIn('install_agent', deployment.inputs)
    self.assertFalse(deployment.inputs['install_agent'])
def test_add_description(self):
    """A deployment update can add a description to the deployment.

    Fix: ``assertRegexpMatches`` is a deprecated unittest alias that was
    removed in Python 3.12 — use ``assertRegex``.
    """
    deployment, modified_bp_path = \
        self._deploy_and_get_modified_bp_path('add_description')

    self.client.blueprints.upload(modified_bp_path, BLUEPRINT_ID)
    wait_for_blueprint_upload(BLUEPRINT_ID, self.client)
    dep_update = \
        self.client.deployment_updates.update_with_existing_blueprint(
            deployment.id, BLUEPRINT_ID)

    # assert that 'update' workflow was executed
    self._wait_for_execution_to_terminate(deployment.id, 'update')
    self._wait_for_successful_state(dep_update.id)

    deployment = self.client.deployments.get(dep_update.deployment_id)
    self.assertRegex(deployment['description'], 'new description')
def test_default_workflow_cascading_flag(self):
    """A workflow declared without is_cascading does not cascade by
    default, and component executions carry the same creator as the
    root execution.
    """
    basic_blueprint_path = self.make_yaml_file(
        self.component_blueprint_with_nothing_workflow)
    self.client.blueprints.upload(basic_blueprint_path,
                                  entity_id='workflow')
    wait_for_blueprint_upload('workflow', self.client)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    # Root blueprint: no is_cascading on nothing_workflow, so the
    # default flag applies.
    main_blueprint = """
tosca_definitions_version: cloudify_dsl_1_3

imports:
  - cloudify/types/types.yaml
  - plugin:mock_workflows

node_templates:
  component_node:
    type: cloudify.nodes.Component
    properties:
      resource_config:
        blueprint:
          external_resource: true
          id: workflow
        deployment:
          id: test

workflows:
  nothing_workflow:
    mapping: mock_workflows.mock_workflows.workflows.do_nothing
"""
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    self.deploy_application(main_blueprint_path,
                            deployment_id=deployment_id)

    main_execution = self.client.executions.start(deployment_id,
                                                  'nothing_workflow')
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.wait_for_execution_to_end(execution)
    # Re-fetch so the statuses reflect the finished executions.
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    for execution in executions:
        self.assertEqual(Execution.TERMINATED, execution.status)
        # Every execution is attributed to the same user as the root.
        self.assertEqual(main_execution.created_by, execution.created_by)
def test_maintenance_mode(self):
    """Maintenance mode stays in 'activating' while an execution is
    running, becomes 'activated' once it is cancelled, and can then be
    deactivated.
    """
    blueprint_id = 'b{0}'.format(uuid.uuid4())
    deployment_id = blueprint_id
    blueprint_path = resource('dsl/agent_tests/maintenance_mode.yaml')
    self.client.blueprints.upload(blueprint_path, entity_id=blueprint_id)
    wait_for_blueprint_upload(blueprint_id, self.client)
    self.client.deployments.create(blueprint_id=blueprint_id,
                                   deployment_id=deployment_id)
    wait_for_deployment_creation_to_complete(self.env.container_id,
                                             deployment_id,
                                             self.client)

    # Running none blocking installation
    execution = self.client.executions.start(deployment_id=deployment_id,
                                             workflow_id='install')
    # Wait only until the execution STARTS, so it is still running when
    # maintenance mode is activated below.
    self.wait_for_execution_status(execution.id, status=Execution.STARTED)

    self.logger.info(
        "checking if maintenance status has status 'deactivated'")
    self._check_maintenance_status('deactivated')

    self.logger.info('activating maintenance mode')
    self.client.maintenance_mode.activate()
    self.addCleanup(self.cleanup)

    # With an execution in flight, activation cannot complete yet.
    self.logger.info(
        "checking if maintenance status has changed to 'activating'")
    self.do_assertions(self._check_maintenance_status, timeout=60,
                       status='activating')

    self.logger.info('cancelling installation')
    self.client.executions.cancel(execution['id'])

    # Once no executions run, maintenance mode fully activates.
    self.logger.info(
        "checking if maintenance status has changed to 'activated'")
    self.do_assertions(self._check_maintenance_status, timeout=60,
                       status='activated')

    self.logger.info('deactivating maintenance mode')
    self.client.maintenance_mode.deactivate()
    self.logger.info(
        "checking if maintenance status has changed to 'deactivated'")
    self.do_assertions(self._check_maintenance_status, timeout=60,
                       status='deactivated')
def deploy(self, dsl_path=None, blueprint_id=None, deployment_id=None,
           inputs=None, wait=True, client=None,
           runtime_only_evaluation=False, blueprint_visibility=None,
           deployment_visibility=None):
    """Upload a blueprint (if a path is given) and create a deployment
    from it.

    Either ``dsl_path`` or ``blueprint_id`` must be supplied: with a
    path the blueprint is uploaded first; with only an id an existing
    blueprint is used. Returns the created deployment. When ``wait`` is
    true, blocks until the deployment environment is created.
    """
    if not (dsl_path or blueprint_id):
        raise RuntimeWarning('Please supply blueprint path '
                             'or blueprint id for deploying')

    client = client or self.client
    resource_id = uuid.uuid4()
    blueprint_id = blueprint_id or 'blueprint_{0}'.format(resource_id)
    if dsl_path:
        blueprint_upload_kw = {'path': dsl_path,
                               'entity_id': blueprint_id}
        # If not provided, use the client's default
        if blueprint_visibility:
            blueprint_upload_kw['visibility'] = blueprint_visibility
        client.blueprints.upload(**blueprint_upload_kw)
        wait_for_blueprint_upload(blueprint_id, client, True)
        blueprint = client.blueprints.get(blueprint_id)
    else:
        # No path given: deploy from the pre-existing blueprint id.
        blueprint = None

    deployment_id = deployment_id or 'deployment_{0}'.format(resource_id)
    deployment_create_kw = {
        'blueprint_id': blueprint.id if blueprint else blueprint_id,
        'deployment_id': deployment_id,
        'inputs': inputs,
        'skip_plugins_validation': True,
        'runtime_only_evaluation': runtime_only_evaluation
    }
    # If not provided, use the client's default
    if deployment_visibility:
        deployment_create_kw['visibility'] = deployment_visibility
    deployment = client.deployments.create(**deployment_create_kw)
    if wait:
        wait_for_deployment_creation_to_complete(
            self.env.container_id, deployment_id, client)
    return deployment
def test_cascading_queued_workflow_execution(self):
    """A queued cascading workflow results in a QUEUED component
    execution while the first cascading run is still in progress.
    """
    # Component blueprint: a slow cascading workflow plus a second one.
    basic_blueprint = """
tosca_definitions_version: cloudify_dsl_1_3

imports:
  - cloudify/types/types.yaml
  - plugin:mock_workflows

workflows:
  nothing_workflow:
    mapping: mock_workflows.mock_workflows.workflows.simple_sleep
    is_cascading: true
  other_workflow:
    mapping: mock_workflows.mock_workflows.workflows.do_nothing
    is_cascading: true
"""
    basic_blueprint_path = self.make_yaml_file(basic_blueprint)
    self.client.blueprints.upload(basic_blueprint_path,
                                  entity_id='workflow')
    wait_for_blueprint_upload('workflow', self.client)

    deployment_id = 'd{0}'.format(uuid.uuid4())
    main_blueprint = self.generate_root_blueprint_with_component(
        deployment_id='component')
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    self.deploy_application(main_blueprint_path,
                            deployment_id=deployment_id)

    # First run executes normally; the second is started with
    # queue=True while the first is still sleeping.
    self.execute_workflow('nothing_workflow', deployment_id)
    self.client.executions.start(deployment_id,
                                 'nothing_workflow',
                                 queue=True)
    # Latest component execution should be waiting in the queue.
    component_execution = self.client.executions.list(
        deployment_id='component',
        workflow_id='nothing_workflow',
        is_descending=True,
        sort='created_at')[0]
    self.assertEqual(component_execution.status, Execution.QUEUED)
    # Two cascading runs over root + component = 4 executions total.
    executions = self.client.executions.list(
        workflow_id='nothing_workflow')
    self.assertEqual(len(executions), 4)