def prepare_reset_storage_script(container_id):
    """Copy the reset-storage scripts to the manager and run the prepare step."""
    # Copy both packaged scripts onto the manager container.
    for resource, destination in (
            ('scripts/reset_storage.py', SCRIPT_PATH),
            ('scripts/prepare_reset_storage.py', PREPARE_SCRIPT_PATH)):
        copy_file_to_manager(container_id, get_resource(resource), destination)
    # Run the prepare step with the manager's own python interpreter.
    execute(container_id,
            [MANAGER_PYTHON, PREPARE_SCRIPT_PATH, '--config', CONFIG_PATH])
def _prepare_hello_world(self):
    """Download and prepare the hello-world example blueprint.

    Downloads and extracts the example repo tarball on first use (reusing
    the extracted directory on subsequent calls), overlays the test-specific
    blueprint and plugin yaml files, and optionally applies
    ``self.modify_blueprint_func`` on a private copy of the blueprint dir.

    Returns the path to the blueprint yaml file to deploy.
    """
    logger = self.test_case.logger
    repo_name = 'cloudify-hello-world-example'
    branch = self.test_case.env.core_branch_name
    workdir = self.test_case.env.test_working_dir
    blueprint_tar = os.path.join(workdir, 'hello.tar.gz')
    # GitHub-style tarballs extract into a '<repo>-<branch>' directory.
    blueprint_dir = os.path.join(workdir,
                                 '{0}-{1}'.format(repo_name, branch))
    blueprint_file = os.path.join(blueprint_dir,
                                  'dockercompute_blueprint.yaml')
    if not os.path.exists(blueprint_dir):
        logger.info('Downloading hello world tar')
        helloworld_url = _HELLO_WORLD_URL.format(repo_name, branch)
        # Stream the download to disk in chunks to keep memory bounded.
        response = requests.get(helloworld_url, stream=True)
        with open(blueprint_tar, 'wb') as f:
            for chunk in response.iter_content(chunk_size=8192):
                if chunk:
                    f.write(chunk)
        with tarfile.open(blueprint_tar, 'r:gz') as tar:
            tar.extractall(path=workdir)
        # Overlay the dockercompute blueprint and plugin definitions over
        # the extracted example repo.
        shutil.copy(
            test_utils.get_resource(
                'dsl/agent_tests/dockercompute_helloworld.yaml'),
            blueprint_file)
        shutil.copy(
            test_utils.get_resource(
                'dsl/agent_tests/plugins/diamond.yaml'
            ),
            os.path.join(blueprint_dir, 'diamond.yaml'))
        shutil.copy(
            test_utils.get_resource(
                'dsl/agent_tests/plugins/dockercompute.yaml'
            ),
            os.path.join(blueprint_dir, 'dockercompute.yaml'))
    else:
        logger.info('Reusing existing hello world tar')
    if self.modify_blueprint_func:
        # Work on a private copy so the shared extracted directory is not
        # mutated by per-test blueprint modifications.
        new_blueprint_dir = os.path.join(self.test_case.workdir,
                                         'test-hello-world')
        if os.path.isdir(new_blueprint_dir):
            shutil.rmtree(new_blueprint_dir)
        shutil.copytree(blueprint_dir, new_blueprint_dir)
        blueprint_dir = new_blueprint_dir
        blueprint_file = os.path.join(blueprint_dir,
                                      'dockercompute_blueprint.yaml')
        with utils.YamlPatcher(blueprint_file) as patcher:
            self.modify_blueprint_func(patcher, blueprint_dir)
    return blueprint_file
def test_deployment_update_with_labels(self):
    """Updating a deployment from a new blueprint merges and updates labels."""
    deployment = self._create_deployment_from_blueprint_with_labels()
    updated_blueprint_id = 'update_labels'
    blueprint = self.client.blueprints.upload(
        utils.get_resource('dsl/updated_blueprint_with_labels.yaml'),
        updated_blueprint_id)
    self.client.deployment_updates.update_with_existing_blueprint(
        deployment.id, updated_blueprint_id)
    refreshed_deployment = self.client.deployments.get(deployment.id)

    # Labels expected on the deployment after the update.
    expected_deployment_labels = [
        {'key1': 'key1_val1'},
        {'key2': 'key2_val1'},
        {'key2': 'key2_val2'},
        {'key2': 'updated_key2_val1'},
        {'updated_key': 'updated_key_val1'},
        {'updated_key': 'updated_key_val2'},
    ]
    # Labels expected on the newly uploaded blueprint.
    expected_blueprint_labels = [
        {'bp_key1': 'updated_bp_key1_val1'},
        {'bp_key2': 'bp_key2_val1'},
        {'bp_key2': 'updated_bp_key2_val2'},
        {'updated_bp_key': 'updated_bp_key_val1'},
    ]
    self.assert_labels(blueprint['labels'], expected_blueprint_labels)
    self.assert_labels(refreshed_deployment.labels,
                       expected_deployment_labels)
def test_delete_blueprint(self):
    """A deleted blueprint is gone and deleting it again fails.

    Uses ``assertRaises`` context managers instead of the verbose
    try/``self.fail``/except pattern.
    """
    dsl_path = get_resource("dsl/basic.yaml")
    blueprint_id = self.client.blueprints.upload(dsl_path,
                                                 str(uuid.uuid4())).id
    # verifying blueprint exists
    self.assertEqual(blueprint_id,
                     self.client.blueprints.get(blueprint_id).id)
    # deleting blueprint
    self.assertEqual(blueprint_id,
                     self.client.blueprints.delete(blueprint_id).id)
    # verifying blueprint no longer exists
    with self.assertRaises(CloudifyClientError):
        self.client.blueprints.get(blueprint_id)
    # trying to delete a nonexistent blueprint
    with self.assertRaises(CloudifyClientError):
        self.client.blueprints.delete(blueprint_id)
def test_deploy_with_operation_executor_override(self):
    """The 'start' operation overrides its executor with a local task,
    so the plugin must also be installed on the management worker."""
    dsl_path = get_resource('dsl/operation_executor_override.yaml')
    deployment, _ = self.deploy_application(dsl_path)
    deployment_nodes = self.client.node_instances.list(
        deployment_id=deployment.id)
    # On Python 3, filter() returns a lazy iterator which supports
    # neither len() nor indexing -- build a real list instead.
    webserver_nodes = [node for node in deployment_nodes
                       if 'host' not in node.node_id]
    self.assertEqual(1, len(webserver_nodes))
    webserver_node = webserver_nodes[0]
    start_invocation = self.get_plugin_data(
        plugin_name='target_aware_mock_plugin',
        deployment_id=deployment.id)[webserver_node.id]['start']
    expected_start_invocation = {'target': 'cloudify.management'}
    self.assertEqual(expected_start_invocation, start_invocation)
    agent_data = self.get_plugin_data(plugin_name='agent',
                                      deployment_id=deployment.id)
    # target_aware_mock_plugin should have been installed
    # on the management worker as well because 'start'
    # overrides the executor (with a local task)
    self.assertEqual(agent_data['local']['target_aware_mock_plugin'],
                     ['installed'])
    self.undeploy_application(deployment_id=deployment.id)
def test_deployment_creation_workflow(self):
    """Deploy creates the deployment dir and installs the local plugins;
    undeploy uninstalls them and removes the dir."""
    dsl_path = get_resource(
        'dsl/basic_with_deployment_plugin_and_workflow_plugin.yaml')
    deployment, _ = self.deploy_application(dsl_path)
    deployment_dir_path = os.path.join('/opt/mgmtworker/work/deployments',
                                       deployment.id)
    self.execute_on_manager('test -d {0}'.format(deployment_dir_path))

    def local_plugin_states():
        # cloudmock and mock_workflows run on the management worker as
        # local tasks, so their state is tracked under 'local'.
        return self.get_plugin_data(
            plugin_name='agent', deployment_id=deployment.id)['local']

    # assert plugin installer installed the necessary plugins.
    states = local_plugin_states()
    self.assertEqual(states['cloudmock'], ['installed'])
    self.assertEqual(states['mock_workflows'], ['installed'])

    self.undeploy_application(deployment.id, is_delete_deployment=True)

    # assert plugin installer uninstalled the necessary plugins.
    states = local_plugin_states()
    self.assertEqual(states['cloudmock'], ['installed', 'uninstalled'])
    self.assertEqual(states['mock_workflows'], ['installed', 'uninstalled'])
    self.assertRaises(sh.ErrorReturnCode, self.execute_on_manager,
                      'test -d {0}'.format(deployment_dir_path))
def test_deployment_create_workflow_and_source_plugin(self):
    """Install a plugin from a source zip referenced by the blueprint."""
    # Copy the whole blueprint folder into a temp dir, because a plugin
    # zip has to be created next to it for installation from source.
    dsl_path = get_resource('dsl/plugin_tests')
    base_temp_dir = tempfile.mkdtemp()
    blueprint_dir = os.path.join(base_temp_dir, 'blueprint')
    shutil.copytree(dsl_path, blueprint_dir)
    blueprint_path = os.path.join(blueprint_dir, 'source_plugin.yaml')
    source_plugin = os.path.join(blueprint_dir, 'plugins', 'mock-plugin')
    try:
        # Zip the plugin sources so the blueprint can install from them.
        create_zip(source_plugin, '{0}.zip'.format(source_plugin))
        deployment, _ = self.deploy_application(blueprint_path)
        deployment_folder = self._get_deployment_folder(deployment)
        plugin_path = self._get_plugin_path(deployment)
        # assert plugin installer installed the necessary plugin
        self._assert_path_exists_on_manager(deployment_folder, True)
        self._assert_path_exists_on_manager(plugin_path)
        self.undeploy_application(deployment.id,
                                  is_delete_deployment=True)
        # Retry several times, because uninstalling plugins may take time
        self._assert_paths_removed(deployment_folder, plugin_path)
    finally:
        shutil.rmtree(base_temp_dir)
def test_scaled_relationships(self):
    """Verify install-graph dependencies for a scaled (2-instance) group.

    Builds a blueprint that scales a group of two nodes to two instances,
    deploys it, then walks the install tasks graph and checks that each
    'configure' subgraph of the dependent node waits on the expected
    predecessor tasks.
    """
    deployment_id = 'd{0}'.format(uuid.uuid4())
    # NOTE(review): the scaling-policy snippet below was reconstructed as
    # multi-line YAML from a whitespace-collapsed source -- confirm the
    # exact indentation against the original file.
    main_blueprint = self.generate_blueprint('create') + """
groups:
  group1:
    members: [node, depends_on_operation_node]
policies:
  policy:
    type: cloudify.policies.scaling
    targets: [group1]
    properties:
      default_instances: 2
"""
    base_blueprint_path = utils.get_resource('dsl/mock_workflows.yaml')
    self.client.blueprints.upload(base_blueprint_path, 'mock_workflows')
    utils.wait_for_blueprint_upload('mock_workflows', self.client)
    main_blueprint_path = self.make_yaml_file(main_blueprint)
    _, execution_id = self.deploy_application(main_blueprint_path,
                                              deployment_id=deployment_id)
    task_graphs = self.client.tasks_graphs.list(execution_id, 'install')
    # operations_info: keyed by (operation name, node id) -> subgraph info.
    operations_info = {}
    # operations_id: keyed by operation id -> dependencies and info text.
    operations_id = {}
    for graph in task_graphs:
        operations = self.client.operations.list(graph.id)
        for op in operations:
            operations_id[op.id] = {}
            operations_id[op.id]['dependencies'] = op.dependencies
            operations_id[op.id]['info'] = op.info
            try:
                cloudify_context = op.parameters['task_kwargs']['kwargs'][
                    '__cloudify_context']
            except KeyError:
                # Not a lifecycle operation task (e.g. a subgraph or a
                # bookkeeping task) -- skip it.
                continue
            op_name = cloudify_context['operation']['name']
            node_id = cloudify_context['node_id']
            operations_info[(op_name, node_id)] = {}
            operations_info[(op_name, node_id)]['containing_subgraph']\
                = op.containing_subgraph
            operations_info[(op_name, node_id)]['op_name'] = op_name
    # Collect the subgraphs containing the 'configure' operation of each
    # depends_on_operation_node instance; scaling gives two instances.
    install_subgraph_ids = [
        v['containing_subgraph'] for (__, node), v
        in operations_info.items()
        if ('depends_on_operation_node' in node
            and v['op_name'] == 'cloudify.interfaces.lifecycle.configure')
    ]
    self.assertEqual(len(install_subgraph_ids), 2)
    for install_id in install_subgraph_ids:
        # Each configure subgraph must depend on exactly these two
        # predecessor tasks (order-insensitive comparison).
        next_tasks_info = [
            operations_id[dep]['info']
            for dep in operations_id[install_id]['dependencies']
        ]
        self.assertCountEqual(['Node instance created', 'created'],
                              next_tasks_info)
def upload_blueprint_resource(self, dsl_resource_path, blueprint_id,
                              client=None):
    """Upload a packaged DSL resource as a blueprint with the given id.

    Falls back to ``self.client`` when no client is supplied.
    """
    target_client = client or self.client
    target_client.blueprints.upload(get_resource(dsl_resource_path),
                                    entity_id=blueprint_id)
def check_main_blueprint(self):
    """Deploy the example manager blueprint and then tear it down."""
    blueprint_id = 'manager_blueprint'
    self.inputs = dict(self.client_config)
    # Openstack inputs; external_network_id can be overridden via env.
    self.inputs.update({
        'external_network_id': os.getenv(
            'external_network_id', 'dda079ce-12cf-4309-879a-8e67aec94de4'),
        'example_subnet_cidr': '10.10.0.0/24',
        'name_prefix': 'blueprint_',
        'image_id': 'e41430f7-9131-495b-927f-e7dc4b8994c8',
        'flavor_id': '3',
        'agent_user': '******',
    })
    blueprint_resource = test_utils.get_resource(
        os.path.join(self.plugin_root_directory,
                     'examples/manager/blueprint.yaml'))
    dep, ex_id = self.deploy_application(blueprint_resource,
                                         timeout_seconds=200,
                                         blueprint_id=blueprint_id,
                                         deployment_id=blueprint_id,
                                         inputs=self.inputs)
    self.undeploy_application(dep.id)
def _make_archive_file(self, blueprint_path, write_mode='w'):
    """Pack the directory containing *blueprint_path* into a tar archive.

    :param blueprint_path: resource path of a blueprint file; its parent
        directory is what gets archived.
    :param write_mode: tarfile mode, e.g. 'w' or 'w:gz'.
    :return: path of the created archive file.
    """
    dsl_path = get_resource(blueprint_path)
    blueprint_dir = os.path.dirname(dsl_path)
    # mkstemp returns an OPEN os-level fd along with the path; the
    # original code discarded the fd, leaking a descriptor per call.
    # Close it explicitly -- tarfile reopens the path by name.
    fd, archive_location = tempfile.mkstemp()
    os.close(fd)
    arcname = os.path.basename(blueprint_dir)
    with tarfile.open(archive_location, write_mode) as tar:
        tar.add(blueprint_dir, arcname=arcname)
    return archive_location
def _start_a_workflow(self):
    """Upload dsl/basic.yaml and create a deployment; return its id."""
    # Blueprint and deployment share one unique id.
    name = 'basic_{}'.format(uuid.uuid4())
    self.client.blueprints.upload(utils.get_resource('dsl/basic.yaml'),
                                  name)
    self.client.deployments.create(name, name)
    utils.wait_for_deployment_creation_to_complete(name)
    return name
def _upload_mock_plugin(self):
    """Build the target-aware-mock wagon and upload it with its yaml."""
    wagon_path = self._create_test_wagon('target-aware-mock')
    self.downloaded_archive_path = os.path.join(
        self.workdir, os.path.basename(wagon_path))
    yaml_path = test_utils.get_resource('plugins/target-aware-mock/'
                                        'plugin.yaml')
    # Plugins are uploaded as a zip bundling the wagon and plugin.yaml.
    with utils.zip_files([wagon_path, yaml_path]) as bundle:
        self.client.plugins.upload(bundle)
def _start_a_workflow(self):
    """Kick off the create-deployment workflow for a fresh basic blueprint."""
    blueprint_id = deployment_id = 'basic_{}'.format(uuid.uuid4())
    dsl_path = utils.get_resource('dsl/basic.yaml')
    self.client.blueprints.upload(dsl_path, blueprint_id)
    self.client.deployments.create(blueprint_id, deployment_id)
    # Block until the create-deployment workflow finishes.
    utils.wait_for_deployment_creation_to_complete(deployment_id)
    return deployment_id
def test_start_monitor_node_operation(self):
    """The start_monitor operation is invoked exactly once."""
    deployment, _ = self.deploy_application(
        get_resource("dsl/hardcoded_operation_properties.yaml"))
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['monitoring_operations_invocation']
    self.assertEqual(1, len(invocations))
    self.assertEqual('start_monitor', invocations[0]['operation'])
def test_get_blueprint(self):
    """A deployed blueprint can be fetched by id and carries its plan."""
    blueprint_id = str(uuid.uuid4())
    deployment, _ = self.deploy_application(
        get_resource("dsl/basic.yaml"), blueprint_id=blueprint_id)
    self.assertEqual(blueprint_id, deployment.blueprint_id)
    fetched = self.client.blueprints.get(blueprint_id)
    self.assertEqual(blueprint_id, fetched.id)
    # The parsed plan must be populated.
    self.assertTrue(len(fetched['plan']) > 0)
def test_execute_operation_failure(self):
    """Deploying this blueprint is expected to fail.

    Fixes two bugs of the old try/``self.fail``/except pattern:
    the AssertionError raised by ``self.fail()`` was itself caught by
    ``except Exception`` (so an unexpected success still passed), and
    ``e.message`` does not exist on Python 3.
    """
    deployment_id = str(uuid.uuid4())
    dsl_path = get_resource("dsl/basic.yaml")
    # assertRaises cannot swallow its own failure the way the old
    # pattern did.
    with self.assertRaises(Exception) as cm:
        self.deploy_application(dsl_path, deployment_id=deployment_id)
    message = str(cm.exception)  # .message was removed in Python 3
    if message:
        self.logger.info(message)
def test_plugin_get_resource(self):
    """ctx.get_resource inside a plugin returns blueprint resources.

    Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    dsl_path = get_resource("dsl/get_resource_in_plugin.yaml")
    deployment, _ = self.deploy_application(dsl_path)
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['get_resource_operation_invocation']
    self.assertEqual(1, len(invocations))
    invocation = invocations[0]
    with open(get_resource("dsl/basic.yaml")) as f:
        basic_data = f.read()
    # checking the resources are the correct data
    self.assertEqual(basic_data, invocation['res1_data'])
    self.assertEqual(basic_data, invocation['res2_data'])
    # checking the custom filepath provided is indeed where the second
    # resource was saved
    self.assertEqual(invocation['custom_filepath'],
                     invocation['res2_path'])
def _deploy_aws_example(self, blueprint_id, blueprint_path, inputs=None):
    """Deploy an AWS example blueprint, defaulting inputs to client_config."""
    deployment_inputs = inputs if inputs else self.client_config
    resource = test_utils.get_resource(
        os.path.join(self.plugin_root_directory, blueprint_path))
    # Blueprint and deployment share the same id; AWS deploys are slow,
    # hence the generous timeout.
    self.deploy_application(resource,
                            timeout_seconds=1200,
                            blueprint_id=blueprint_id,
                            deployment_id=blueprint_id,
                            inputs=deployment_inputs)
def test_riemann_core_started_with_policies(self):
    """A riemann core is started if the blueprint defines policies."""
    deployment, _ = self.deploy_application(
        get_resource('dsl/with_policies1.yaml'))
    # Core is up while the deployment exists...
    self.assertTrue(riemann.is_riemann_core_up(deployment.id))
    self.undeploy_application(deployment.id, is_delete_deployment=True)
    # ...and down again once it is undeployed and deleted.
    self.assertFalse(riemann.is_riemann_core_up(deployment.id))
def _create_deployment_from_blueprint_with_labels(self, new_labels=None):
    """Upload the labels blueprint and create a deployment from it.

    :param new_labels: optional labels passed to deployment creation.
    :return: the created deployment.
    """
    # Blueprint and deployment share one unique id.
    ident = 'd{0}'.format(uuid.uuid4())
    self.client.blueprints.upload(
        utils.get_resource('dsl/blueprint_with_labels.yaml'), ident)
    deployment = self.client.deployments.create(ident, ident,
                                                labels=new_labels)
    utils.wait_for_deployment_creation_to_complete(
        self.env.container_id, ident, self.client)
    return deployment
def test_riemann_core_not_started_without_policies(self):
    """A riemann core isn't started if there's no policies defined."""
    deployment, _ = self.deploy_application(
        get_resource('dsl/without_policies.yaml'))
    # Never up: neither while deployed nor after undeploy.
    self.assertFalse(riemann.is_riemann_core_up(deployment.id))
    self.undeploy_application(deployment.id, is_delete_deployment=True)
    self.assertFalse(riemann.is_riemann_core_up(deployment.id))
def _deploy_shared_resource(self, deployment_id=SR_DEPLOYMENT,
                            upload_blueprint=True,
                            resource_visibility=VisibilityState.GLOBAL):
    """Deploy the capabilities blueprint as a shared resource.

    When *upload_blueprint* is False, reuse the already-uploaded
    blueprint by passing None to deploy().
    """
    blueprint = None
    if upload_blueprint:
        blueprint = get_resource('dsl/blueprint_with_capabilities.yaml')
    self.deploy(blueprint,
                'shared_resource_blueprint',
                deployment_id,
                blueprint_visibility=resource_visibility,
                deployment_visibility=resource_visibility)
def test_inject_properties_to_operation(self):
    """Hardcoded operation properties reach the mock operation invocation."""
    deployment, _ = self.deploy_application(
        get_resource("dsl/hardcoded_operation_properties.yaml"))
    states = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['state']
    invocations = self.get_plugin_data(
        plugin_name='testmockoperations',
        deployment_id=deployment.id)['mock_operation_invocation']
    self.assertEqual(1, len(invocations))
    only_invocation = invocations[0]
    # Both the hardcoded property and the instance id flow through.
    self.assertEqual('mockpropvalue', only_invocation['mockprop'])
    self.assertEqual(states[0]['id'], only_invocation['id'])
def upload_mock_plugin(self, plugin_name, plugin_path=None):
    """Build a wagon for the named mock plugin and upload it.

    Waits for the install_plugin workflow triggered by the upload.
    """
    if not plugin_path:
        plugin_path = test_utils.get_resource(
            'plugins/{0}'.format(plugin_name))
    wagon_path = self._create_test_wagon(plugin_path)
    # Bundle the wagon with its plugin.yaml into a single zip.
    bundle_members = [wagon_path,
                      os.path.join(plugin_path, 'plugin.yaml')]
    with utils.zip_files(bundle_members) as zip_path:
        self.client.plugins.upload(zip_path)
    self._wait_for_execution_by_wf_name('install_plugin')
def check_blueprint(self, blueprint_id, blueprint_path, deployment_inputs,
                    timeout=None):
    """Round-trip a blueprint: register cleanup, deploy, then undeploy."""
    self.add_cleanup_deployment(blueprint_id)
    resolved_path = test_utils.get_resource(blueprint_path)
    # Blueprint and deployment share the same id.
    self.deploy_application(resolved_path,
                            timeout_seconds=timeout,
                            blueprint_id=blueprint_id,
                            deployment_id=blueprint_id,
                            inputs=deployment_inputs)
    self.undeploy_application(blueprint_id, timeout)
def upload_mock_plugin(self, plugin_name, plugin_path=None):
    """Build (or reuse) wagons for the plugin and upload each of them.

    :param plugin_name: name of the mock plugin under ``plugins/``.
    :param plugin_path: optional explicit path to the plugin sources;
        resolved from *plugin_name* when not given.
    """
    if not plugin_path:
        plugin_path = test_utils.get_resource(
            'plugins/{0}'.format(plugin_name))
    # Log only after resolving the path, so the message never reads
    # "from None..." when plugin_path was omitted.
    self.logger.info('Starting uploading {0} from {1}...'.format(
        plugin_name, plugin_path))
    wagon_paths = self._get_or_create_wagon(plugin_path)
    # plugin.yaml is the same for every wagon; compute it once.
    yaml_path = os.path.join(plugin_path, 'plugin.yaml')
    for wagon_path in wagon_paths:
        with utils.zip_files([wagon_path, yaml_path]) as zip_path:
            self.client.plugins.upload(zip_path)
    self.logger.info('Finished uploading {0}...'.format(plugin_name))
def test_dependencies_order_with_two_nodes(self):
    """Nodes are processed in dependency order: host_node before db_node.

    Uses ``assertEqual``/``assertIn`` in place of the deprecated
    ``assertEquals`` alias and ``assertTrue(x in y)``.
    """
    dsl_path = get_resource("dsl/dependencies_order_with_two_nodes.yaml")
    blueprint_id = self.id()
    deployment, _ = self.deploy_application(dsl_path,
                                            blueprint_id=blueprint_id)
    self.assertEqual(blueprint_id, deployment.blueprint_id)
    states = self.get_plugin_data(plugin_name='testmockoperations',
                                  deployment_id=deployment.id)['state']
    self.assertEqual(2, len(states))
    # assertIn gives a clearer failure message than assertTrue('x' in y).
    self.assertIn('host_node', states[0]['id'])
    self.assertIn('db_node', states[1]['id'])
def test_get_attribute(self):
    """get_attribute resolution across relationship and node contexts."""
    # assertion happens in operation get_attribute.tasks.assertion
    deployment, _ = self.deploy_application(
        get_resource('dsl/get_attributes.yaml'))
    data = self.get_plugin_data(plugin_name='get_attribute',
                                deployment_id=deployment.id)
    invocations = data['invocations']
    relationship_count = sum(
        1 for inv in invocations if inv == context.RELATIONSHIP_INSTANCE)
    node_count = sum(
        1 for inv in invocations if inv == context.NODE_INSTANCE)
    self.assertEqual(2, relationship_count)
    self.assertEqual(1, node_count)
def test_execute_operation(self):
    """A basic deploy starts one machine and exposes an empty ip output.

    Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    dsl_path = get_resource('dsl/basic.yaml')
    blueprint_id = self.id()
    deployment, _ = self.deploy_application(dsl_path,
                                            blueprint_id=blueprint_id,
                                            timeout_seconds=15)
    self.assertEqual(blueprint_id, deployment.blueprint_id)
    machines = self.get_plugin_data(
        plugin_name='cloudmock',
        deployment_id=deployment.id)['machines']
    self.assertEqual(1, len(machines))
    outputs = self.client.deployments.outputs.get(deployment.id).outputs
    self.assertEqual(outputs['ip_address'], '')
def test_cloudify_runtime_properties_injection(self):
    """Runtime properties of the host node are injected into capabilities.

    Fixes the Python-2-only ``dict.iteritems()`` (removed in Python 3)
    and the assertion message that said 'Expected 2' while asserting a
    length of 1.
    """
    dsl_path = get_resource("dsl/dependencies_order_with_two_nodes.yaml")
    deployment, _ = self.deploy_application(dsl_path)
    states = self.get_plugin_data(plugin_name='testmockoperations',
                                  deployment_id=deployment.id)['state']
    node_runtime_props = None
    # items() replaces the removed iteritems(); behavior is the same.
    for k, v in states[1]['capabilities'].items():
        if 'host_node' in k:
            node_runtime_props = v
            break
    self.assertEqual('value1', node_runtime_props['property1'])
    self.assertEqual(
        1, len(node_runtime_props),
        msg='Expected 1 but contains: {0}'.format(node_runtime_props))
def test_deployment_with_the_same_id(self):
    """Create multiple deployments with the same ID.

    Runs several create/delete deployment cycles to find out whether any
    race condition prevents creating a deployment with the same ID right
    after it has been deleted.
    """
    # Blueprint and deployment share one unique id throughout.
    name = 'd{0}'.format(uuid.uuid4())
    self.client.blueprints.upload(utils.get_resource('dsl/basic.yaml'),
                                  name)
    for _ in range(self.DEPLOYMENTS_COUNT):
        self.client.deployments.create(
            name, name, skip_plugins_validation=True)
        utils.wait_for_deployment_creation_to_complete(name)
        self.client.deployments.delete(name)
        utils.wait_for_deployment_deletion_to_complete(name)
def _upload_license(self, license):
    """Upload the named license file from the packaged resources."""
    self.client.license.upload(
        get_resource('licenses/{0}'.format(license)))