def test_redirect_templates_paths(self, mock_hc_templ_parse, mock_hc_env_parse, mock_hc_get_templ_cont, mock_hc_process):
    """Paths under the user t-h-t root are redirected into the working
    templates dir, while unrelated paths are passed through untouched.
    """
    utils.process_multiple_environments(
        self.created_env_files, self.tht_root, self.user_tht_root)

    # Expected env_path for each created env file, in call order.
    expected_paths = [
        './inside.yaml',
        '/twd/templates/abs.yaml',
        '/twd/templates/puppet/foo.yaml',
        '/twd/templates/environments/myenv.yaml',
        '/tmp/thtroot42/notouch.yaml',
        './tmp/thtroot/notouch2.yaml',
        '../outside.yaml',
    ]
    mock_hc_process.assert_has_calls(
        [mock.call(env_path=path) for path in expected_paths])
def test_rewrite_env_files(self, mock_temp, mock_open, mock_yaml_load, mock_yaml_dump, mock_hc_templ_parse, mock_hc_env_parse, mock_hc_get_templ_cont, mock_hc_process):
    """An environment whose registry paths cannot be resolved by
    heatclient is rewritten in place with paths redirected to the
    working templates dir, then dumped back out via yaml.
    """
    def fake_hc_process(*args, **kwargs):
        # Force a rewrite by failing resolution of the abs.yaml path.
        if 'abs.yaml' in kwargs['env_path']:
            raise hc_exc.CommandError
        return ({}, {})

    mock_hc_process.side_effect = fake_hc_process

    # Registry as loaded from disk (paths relative to the user root).
    loaded_registry = {
        'OS::Foo::Bar': '../outside.yaml',
        'OS::Foo::Baz': './inside.yaml',
        'OS::Foo::Qux': '/tmp/thtroot/abs.yaml',
        'OS::Foo::Quux': '/tmp/thtroot42/notouch.yaml',
        'OS::Foo::Corge': '/tmp/thtroot/puppet/foo.yaml'
    }
    # Registry after rewriting against the working dir.
    rewritten_registry = {
        'OS::Foo::Bar': '/twd/outside.yaml',
        'OS::Foo::Baz': '/twd/templates/inside.yaml',
        'OS::Foo::Qux': '/twd/templates/abs.yaml',
        'OS::Foo::Quux': '/tmp/thtroot42/notouch.yaml',
        'OS::Foo::Corge': '/twd/templates/puppet/foo.yaml'
    }
    mock_yaml_load.return_value = {'resource_registry': loaded_registry}

    utils.process_multiple_environments(
        self.created_env_files, self.tht_root, self.user_tht_root, False)

    mock_yaml_dump.assert_has_calls([
        mock.call({'resource_registry': rewritten_registry},
                  default_flow_style=False)])
def _deploy_tripleo_heat_templates(self, orchestration_client, parsed_args):
    """Deploy the fixed templates in TripleO Heat Templates"""
    # _setup_heat_environments sets self.tht_render to the working dir
    # containing the deployed templates.
    environments = self._setup_heat_environments(parsed_args)

    # Rewrite paths so t-h-t env files are consumed from the working dir.
    self.log.debug(_("Processing environment files %s") % environments)
    env_files, env = utils.process_multiple_environments(
        environments, self.tht_render, parsed_args.templates,
        cleanup=parsed_args.cleanup)

    self._prepare_container_images(env)

    self.log.debug(_("Getting template contents"))
    overcloud_yaml = os.path.join(self.tht_render, 'overcloud.yaml')
    template_files, template = \
        template_utils.get_template_contents(overcloud_yaml)

    # Merge template and environment files; env entries win on collision.
    files = dict(template_files)
    files.update(env_files)

    stack_name = parsed_args.stack
    self.log.debug(_("Deploying stack: %s") % stack_name)
    self.log.debug(_("Deploying template: %s") % template)
    self.log.debug(_("Deploying environment: %s") % env)
    self.log.debug(_("Deploying files: %s") % files)

    stack_args = dict(
        stack_name=stack_name,
        template=template,
        environment=env,
        files=files,
    )
    if parsed_args.timeout:
        stack_args['timeout_mins'] = parsed_args.timeout

    self.log.warning(_("** Performing Heat stack create.. **"))
    created = orchestration_client.stacks.create(**stack_args)
    return "%s/%s" % (stack_name, created['stack']['id'])
def _deploy_tripleo_heat_templates(self, orchestration_client, parsed_args):
    """Deploy the fixed templates in TripleO Heat Templates"""
    # _setup_heat_environments leaves self.tht_render pointing at the
    # temporary work dir once it has finished.
    environments = self._setup_heat_environments(parsed_args)

    self.log.debug("Processing environment files %s" % environments)
    env_files, env = utils.process_multiple_environments(
        environments, self.tht_render, parsed_args.templates,
        cleanup=parsed_args.cleanup)

    self.log.debug("Getting template contents")
    template_files, template = template_utils.get_template_contents(
        os.path.join(self.tht_render, 'overcloud.yaml'))

    # Merge template and environment files; env entries win on collision.
    files = dict(template_files)
    files.update(env_files)

    stack_name = parsed_args.stack
    self.log.debug("Deploying stack: %s", stack_name)
    self.log.debug("Deploying template: %s", template)
    self.log.debug("Deploying environment: %s", env)
    self.log.debug("Deploying files: %s", files)

    stack_args = dict(
        stack_name=stack_name,
        template=template,
        environment=env,
        files=files,
    )
    if parsed_args.timeout:
        stack_args['timeout_mins'] = parsed_args.timeout

    self.log.info("Performing Heat stack create")
    created = orchestration_client.stacks.create(**stack_args)
    return "%s/%s" % (stack_name, created['stack']['id'])
def _deploy_tripleo_heat_templates(self, stack, parsed_args, tht_root, user_tht_root):
    """Deploy the fixed templates in TripleO Heat Templates.

    Creates (or updates) the deployment plan for ``parsed_args.stack``
    from the templates in ``tht_root``, assembles the deployment
    environment from user environment files, registration and breakpoint
    cleanup environments, then hands off to
    ``_try_overcloud_deploy_with_compat_yaml`` to run the deploy.

    :param stack: existing Heat stack object, or a falsy value when this
        is an initial deployment (presumably None — confirm at caller).
    :param parsed_args: parsed CLI arguments for the deploy command.
    :param tht_root: local working directory of rendered templates.
    :param user_tht_root: original user-supplied templates directory,
        used to remap environment paths.
    :raises exceptions.InvalidConfiguration: when more than one
        controller is requested without an NtpServer setting.
    """
    plans = plan_management.list_deployment_plans(self.clients)
    generate_passwords = not parsed_args.disable_password_generation

    # TODO(d0ugal): We need to put a more robust strategy in place here to
    # handle updating plans.
    if parsed_args.stack in plans:
        # Upload the new plan templates to swift to replace the existing
        # templates.
        plan_management.update_plan_from_templates(
            self.clients, parsed_args.stack, tht_root,
            parsed_args.roles_file, generate_passwords,
            parsed_args.plan_environment_file,
            parsed_args.networks_file,
            type(self)._keep_env_on_update)
    else:
        plan_management.create_plan_from_templates(
            self.clients, parsed_args.stack, tht_root,
            parsed_args.roles_file, generate_passwords,
            parsed_args.plan_environment_file,
            parsed_args.networks_file)

    # Get any missing (e.g j2 rendered) files from the plan to tht_root
    self._download_missing_files_from_plan(tht_root, parsed_args.stack)

    print("Processing templates in the directory {0}".format(
        os.path.abspath(tht_root)))

    self.log.debug("Creating Environment files")
    env = {}
    created_env_files = []
    if parsed_args.environment_directories:
        created_env_files.extend(
            utils.load_environment_directories(
                parsed_args.environment_directories))

    parameters = {}
    if stack:
        try:
            # If user environment already exist then keep it
            user_env = yaml.safe_load(
                self.object_client.get_object(
                    parsed_args.stack, constants.USER_ENVIRONMENT)[1])
            template_utils.deep_update(env, user_env)
        except ClientException:
            # Best effort: no stored user environment to preserve.
            pass
    parameters.update(self._update_parameters(parsed_args, stack))
    template_utils.deep_update(
        env,
        self._create_parameters_env(parameters, tht_root, parsed_args.stack))

    if parsed_args.rhel_reg:
        reg_env_files, reg_env = self._create_registration_env(
            parsed_args, tht_root)
        created_env_files.extend(reg_env_files)
        template_utils.deep_update(env, reg_env)
    if parsed_args.environment_files:
        created_env_files.extend(parsed_args.environment_files)

    self.log.debug("Processing environment files %s" % created_env_files)
    env_files, localenv = utils.process_multiple_environments(
        created_env_files, tht_root, user_tht_root,
        cleanup=not parsed_args.no_cleanup)
    template_utils.deep_update(env, localenv)

    if stack:
        bp_cleanup = self._create_breakpoint_cleanup_env(
            tht_root, parsed_args.stack)
        template_utils.deep_update(env, bp_cleanup)

    # FIXME(shardy) It'd be better to validate this via mistral
    # e.g part of the plan create/update workflow
    number_controllers = int(parameters.get('ControllerCount', 0))
    if number_controllers > 1:
        # BUGFIX: env may contain no 'parameter_defaults' key at all, in
        # which case env.get(...) returns None and chaining .get() raised
        # AttributeError instead of the intended InvalidConfiguration.
        # Default to an empty dict so the validation error is reported.
        if not env.get('parameter_defaults', {}).get('NtpServer'):
            raise exceptions.InvalidConfiguration(
                'Specify --ntp-server as parameter or NtpServer in '
                'environments when using multiple controllers '
                '(with HA).')

    self._try_overcloud_deploy_with_compat_yaml(
        tht_root, stack, parsed_args.stack, parameters,
        env_files, parsed_args.timeout, env,
        parsed_args.update_plan_only, parsed_args.run_validations,
        parsed_args.skip_deploy_identifier,
        parsed_args.plan_environment_file)